Dataset schema:

  field      type     values
  ---------  -------  ------------------
  code       string   lengths 5 to 1M
  repo_name  string   lengths 5 to 109
  path       string   lengths 6 to 208
  language   string   1 class
  license    string   15 classes
  size       int64    5 to 1M
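For orientation, here is a minimal sketch of one row of this dataset as a Scala case class. The type name CodeRow and the camelCase field names are illustrative assumptions; the fields, types, and ranges come from the schema above.

// Hypothetical row model for this dump; fields mirror the schema columns.
final case class CodeRow(
  code: String,     // full file contents, 5 to ~1M characters
  repoName: String, // "owner/repo" slug, 5 to 109 characters
  path: String,     // file path within the repository, 6 to 208 characters
  language: String, // a single class in this split: "Scala"
  license: String,  // one of 15 license identifiers (e.g. "mit", "apache-2.0")
  size: Long        // int64 file size in bytes, 5 to 1M
)

Each record below is printed as the file contents followed by its metadata fields.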
package typeclass.instances

import typeclass.MonoidLaws
import typeclass.instances.string._
import scalaprops.{Gen, Scalaprops}

object StringTest extends Scalaprops {
  implicit val genString: Gen[String] = Gen.asciiString

  val monoid = MonoidLaws[String].all
}

repo_name: julien-truffaut/Typeclass
path: answer/src/test/scala/typeclass/instances/StringTest.scala
language: Scala
license: mit
size: 269
/**
 * Copyright (C) 2015 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.stratio.datasource.mongodb

import com.stratio.datasource.util.Config
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.types.StructType

import scala.language.implicitConversions

/**
 * @param sqlContext Spark SQLContext
 */
class MongodbContext(sqlContext: SQLContext) {

  /**
   * Retrieves a set of MongoDB objects
   * given a MongoDB configuration object.
   * @param config MongoDB configuration object
   * @return A DataFrame
   */
  def fromMongoDB(config: Config, schema: Option[StructType] = None): DataFrame =
    sqlContext.baseRelationToDataFrame(
      new MongodbRelation(config, schema)(sqlContext))

}

repo_name: darroyocazorla/spark-mongodb
path: spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbContext.scala
language: Scala
license: apache-2.0
size: 1,287
package extruder.circe

import extruder.core.Decode
import extruder.data.Validation
import io.circe.Json

trait CirceDecoder extends Decode { self: CirceDataSource =>
  override type InputData = Json
  override type DecodeData = Json
  override type DecodeDefault[A] = Validation[A]
}

repo_name: janstenpickle/extruder
path: circe/src/main/scala/extruder/circe/CirceDecoder.scala
language: Scala
license: mit
size: 285
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.rest

import java.lang.Boolean

import scala.util.Properties.versionNumberString

import org.json4s.jackson.JsonMethods._

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config._
import org.apache.spark.util.Utils

/**
 * Tests for the REST application submission protocol.
 */
class SubmitRestProtocolSuite extends SparkFunSuite {

  test("validate") {
    val request = new DummyRequest
    intercept[SubmitRestProtocolException] { request.validate() } // missing everything
    request.clientSparkVersion = "1.2.3"
    intercept[SubmitRestProtocolException] { request.validate() } // missing name and age
    request.name = "something"
    intercept[SubmitRestProtocolException] { request.validate() } // missing only age
    request.age = 2
    intercept[SubmitRestProtocolException] { request.validate() } // age too low
    request.age = 10
    request.validate() // everything is set properly
    request.clientSparkVersion = null
    intercept[SubmitRestProtocolException] { request.validate() } // missing only Spark version
    request.clientSparkVersion = "1.2.3"
    request.name = null
    intercept[SubmitRestProtocolException] { request.validate() } // missing only name
    request.message = "not-setting-name"
    intercept[SubmitRestProtocolException] { request.validate() } // still missing name
  }

  test("request to and from JSON") {
    val request = new DummyRequest
    intercept[SubmitRestProtocolException] { request.toJson } // implicit validation
    request.clientSparkVersion = "1.2.3"
    request.active = true
    request.age = 25
    request.name = "jung"
    val json = request.toJson
    assertJsonEquals(json, dummyRequestJson)
    val newRequest = SubmitRestProtocolMessage.fromJson(json, classOf[DummyRequest])
    assert(newRequest.clientSparkVersion === "1.2.3")
    assert(newRequest.clientSparkVersion === "1.2.3")
    assert(newRequest.active)
    assert(newRequest.age === 25)
    assert(newRequest.name === "jung")
    assert(newRequest.message === null)
  }

  test("response to and from JSON") {
    val response = new DummyResponse
    response.serverSparkVersion = "3.3.4"
    response.success = true
    val json = response.toJson
    assertJsonEquals(json, dummyResponseJson)
    val newResponse = SubmitRestProtocolMessage.fromJson(json, classOf[DummyResponse])
    assert(newResponse.serverSparkVersion === "3.3.4")
    assert(newResponse.serverSparkVersion === "3.3.4")
    assert(newResponse.success)
    assert(newResponse.message === null)
  }

  test("CreateSubmissionRequest") {
    val message = new CreateSubmissionRequest
    intercept[SubmitRestProtocolException] { message.validate() }
    message.clientSparkVersion = "1.2.3"
    message.appResource = "honey-walnut-cherry.jar"
    message.mainClass = "org.apache.spark.examples.SparkPie"
    message.appArgs = Array("two slices")
    message.environmentVariables = Map("PATH" -> "/dev/null")
    val conf = new SparkConf(false)
    conf.set("spark.app.name", "SparkPie")
    message.sparkProperties = conf.getAll.toMap
    message.validate()
    // optional fields
    conf.set(JARS, Seq("mayonnaise.jar", "ketchup.jar"))
    conf.set(FILES.key, "fireball.png")
    conf.set(ARCHIVES.key, "fireballs.zip")
    conf.set("spark.driver.memory", s"${Utils.DEFAULT_DRIVER_MEM_MB}m")
    conf.set(DRIVER_CORES, 180)
    conf.set("spark.driver.extraJavaOptions", " -Dslices=5 -Dcolor=mostly_red")
    conf.set("spark.driver.extraClassPath", "food-coloring.jar")
    conf.set("spark.driver.extraLibraryPath", "pickle.jar")
    conf.set(DRIVER_SUPERVISE, false)
    conf.set("spark.executor.memory", "256m")
    conf.set(CORES_MAX, 10000)
    message.sparkProperties = conf.getAll.toMap
    message.appArgs = Array("two slices", "a hint of cinnamon")
    message.environmentVariables = Map("PATH" -> "/dev/null")
    message.validate()
    // bad fields
    var badConf = conf.clone().set(DRIVER_CORES.key, "one hundred feet")
    message.sparkProperties = badConf.getAll.toMap
    intercept[SubmitRestProtocolException] { message.validate() }
    badConf = conf.clone().set(DRIVER_SUPERVISE.key, "nope, never")
    message.sparkProperties = badConf.getAll.toMap
    intercept[SubmitRestProtocolException] { message.validate() }
    badConf = conf.clone().set(CORES_MAX.key, "two men")
    message.sparkProperties = badConf.getAll.toMap
    intercept[SubmitRestProtocolException] { message.validate() }
    message.sparkProperties = conf.getAll.toMap
    // test JSON
    val json = message.toJson
    assertJsonEquals(json, submitDriverRequestJson)
    val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[CreateSubmissionRequest])
    assert(newMessage.clientSparkVersion === "1.2.3")
    assert(newMessage.appResource === "honey-walnut-cherry.jar")
    assert(newMessage.mainClass === "org.apache.spark.examples.SparkPie")
    assert(newMessage.sparkProperties("spark.app.name") === "SparkPie")
    assert(newMessage.sparkProperties(JARS.key) === "mayonnaise.jar,ketchup.jar")
    assert(newMessage.sparkProperties(FILES.key) === "fireball.png")
    assert(newMessage.sparkProperties("spark.driver.memory") === s"${Utils.DEFAULT_DRIVER_MEM_MB}m")
    assert(newMessage.sparkProperties(DRIVER_CORES.key) === "180")
    assert(newMessage.sparkProperties("spark.driver.extraJavaOptions") === " -Dslices=5 -Dcolor=mostly_red")
    assert(newMessage.sparkProperties("spark.driver.extraClassPath") === "food-coloring.jar")
    assert(newMessage.sparkProperties("spark.driver.extraLibraryPath") === "pickle.jar")
    assert(newMessage.sparkProperties(DRIVER_SUPERVISE.key) === "false")
    assert(newMessage.sparkProperties("spark.executor.memory") === "256m")
    assert(newMessage.sparkProperties(CORES_MAX.key) === "10000")
    assert(newMessage.appArgs === message.appArgs)
    assert(newMessage.sparkProperties === message.sparkProperties)
    assert(newMessage.environmentVariables === message.environmentVariables)
  }

  test("CreateSubmissionResponse") {
    val message = new CreateSubmissionResponse
    intercept[SubmitRestProtocolException] { message.validate() }
    message.serverSparkVersion = "1.2.3"
    message.submissionId = "driver_123"
    message.success = true
    message.validate()
    // test JSON
    val json = message.toJson
    assertJsonEquals(json, submitDriverResponseJson)
    val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[CreateSubmissionResponse])
    assert(newMessage.serverSparkVersion === "1.2.3")
    assert(newMessage.submissionId === "driver_123")
    assert(newMessage.success)
  }

  test("KillSubmissionResponse") {
    val message = new KillSubmissionResponse
    intercept[SubmitRestProtocolException] { message.validate() }
    message.serverSparkVersion = "1.2.3"
    message.submissionId = "driver_123"
    message.success = true
    message.validate()
    // test JSON
    val json = message.toJson
    assertJsonEquals(json, killDriverResponseJson)
    val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[KillSubmissionResponse])
    assert(newMessage.serverSparkVersion === "1.2.3")
    assert(newMessage.submissionId === "driver_123")
    assert(newMessage.success)
  }

  test("SubmissionStatusResponse") {
    val message = new SubmissionStatusResponse
    intercept[SubmitRestProtocolException] { message.validate() }
    message.serverSparkVersion = "1.2.3"
    message.submissionId = "driver_123"
    message.success = true
    message.validate()
    // optional fields
    message.driverState = "RUNNING"
    message.workerId = "worker_123"
    message.workerHostPort = "1.2.3.4:7780"
    // test JSON
    val json = message.toJson
    assertJsonEquals(json, driverStatusResponseJson)
    val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[SubmissionStatusResponse])
    assert(newMessage.serverSparkVersion === "1.2.3")
    assert(newMessage.submissionId === "driver_123")
    assert(newMessage.driverState === "RUNNING")
    assert(newMessage.success)
    assert(newMessage.workerId === "worker_123")
    assert(newMessage.workerHostPort === "1.2.3.4:7780")
  }

  test("ErrorResponse") {
    val message = new ErrorResponse
    intercept[SubmitRestProtocolException] { message.validate() }
    message.serverSparkVersion = "1.2.3"
    message.message = "Field not found in submit request: X"
    message.validate()
    // test JSON
    val json = message.toJson
    assertJsonEquals(json, errorJson)
    val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[ErrorResponse])
    assert(newMessage.serverSparkVersion === "1.2.3")
    assert(newMessage.message === "Field not found in submit request: X")
  }

  private val dummyRequestJson =
    """
      |{
      |  "action" : "DummyRequest",
      |  "active" : true,
      |  "age" : 25,
      |  "clientSparkVersion" : "1.2.3",
      |  "name" : "jung"
      |}
    """.stripMargin

  private val dummyResponseJson =
    """
      |{
      |  "action" : "DummyResponse",
      |  "serverSparkVersion" : "3.3.4",
      |  "success": true
      |}
    """.stripMargin

  private lazy val submitDriverRequestJson = if (versionNumberString.startsWith("2.12")) {
    s"""
      |{
      |  "action" : "CreateSubmissionRequest",
      |  "appArgs" : [ "two slices", "a hint of cinnamon" ],
      |  "appResource" : "honey-walnut-cherry.jar",
      |  "clientSparkVersion" : "1.2.3",
      |  "environmentVariables" : {
      |    "PATH" : "/dev/null"
      |  },
      |  "mainClass" : "org.apache.spark.examples.SparkPie",
      |  "sparkProperties" : {
      |    "spark.archives" : "fireballs.zip",
      |    "spark.driver.extraLibraryPath" : "pickle.jar",
      |    "spark.jars" : "mayonnaise.jar,ketchup.jar",
      |    "spark.driver.supervise" : "false",
      |    "spark.app.name" : "SparkPie",
      |    "spark.cores.max" : "10000",
      |    "spark.driver.memory" : "${Utils.DEFAULT_DRIVER_MEM_MB}m",
      |    "spark.files" : "fireball.png",
      |    "spark.driver.cores" : "180",
      |    "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
      |    "spark.executor.memory" : "256m",
      |    "spark.driver.extraClassPath" : "food-coloring.jar"
      |  }
      |}
    """.stripMargin
  } else {
    s"""
      |{
      |  "action" : "CreateSubmissionRequest",
      |  "appArgs" : [ "two slices", "a hint of cinnamon" ],
      |  "appResource" : "honey-walnut-cherry.jar",
      |  "clientSparkVersion" : "1.2.3",
      |  "environmentVariables" : {
      |    "PATH" : "/dev/null"
      |  },
      |  "mainClass" : "org.apache.spark.examples.SparkPie",
      |  "sparkProperties" : {
      |    "spark.archives" : "fireballs.zip",
      |    "spark.driver.extraLibraryPath" : "pickle.jar",
      |    "spark.jars" : "mayonnaise.jar,ketchup.jar",
      |    "spark.driver.supervise" : "false",
      |    "spark.driver.memory" : "${Utils.DEFAULT_DRIVER_MEM_MB}m",
      |    "spark.files" : "fireball.png",
      |    "spark.driver.cores" : "180",
      |    "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
      |    "spark.app.name" : "SparkPie",
      |    "spark.cores.max" : "10000",
      |    "spark.executor.memory" : "256m",
      |    "spark.driver.extraClassPath" : "food-coloring.jar"
      |  }
      |}
    """.stripMargin
  }

  private val submitDriverResponseJson =
    """
      |{
      |  "action" : "CreateSubmissionResponse",
      |  "serverSparkVersion" : "1.2.3",
      |  "submissionId" : "driver_123",
      |  "success" : true
      |}
    """.stripMargin

  private val killDriverResponseJson =
    """
      |{
      |  "action" : "KillSubmissionResponse",
      |  "serverSparkVersion" : "1.2.3",
      |  "submissionId" : "driver_123",
      |  "success" : true
      |}
    """.stripMargin

  private val driverStatusResponseJson =
    """
      |{
      |  "action" : "SubmissionStatusResponse",
      |  "driverState" : "RUNNING",
      |  "serverSparkVersion" : "1.2.3",
      |  "submissionId" : "driver_123",
      |  "success" : true,
      |  "workerHostPort" : "1.2.3.4:7780",
      |  "workerId" : "worker_123"
      |}
    """.stripMargin

  private val errorJson =
    """
      |{
      |  "action" : "ErrorResponse",
      |  "message" : "Field not found in submit request: X",
      |  "serverSparkVersion" : "1.2.3"
      |}
    """.stripMargin

  /** Assert that the contents in the two JSON strings are equal after ignoring whitespace. */
  private def assertJsonEquals(jsonString1: String, jsonString2: String): Unit = {
    val trimmedJson1 = jsonString1.trim
    val trimmedJson2 = jsonString2.trim
    val json1 = compact(render(parse(trimmedJson1)))
    val json2 = compact(render(parse(trimmedJson2)))
    // Put this on a separate line to avoid printing comparison twice when test fails
    val equals = json1 == json2
    assert(equals, "\"[%s]\" did not equal \"[%s]\"".format(trimmedJson1, trimmedJson2))
  }
}

private class DummyResponse extends SubmitRestProtocolResponse

private class DummyRequest extends SubmitRestProtocolRequest {
  var active: Boolean = null
  var age: Integer = null
  var name: String = null

  protected override def doValidate(): Unit = {
    super.doValidate()
    assertFieldIsSet(name, "name")
    assertFieldIsSet(age, "age")
    assert(age > 5, "Not old enough!")
  }
}

repo_name: maropu/spark
path: core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
language: Scala
license: apache-2.0
size: 14,195
package at.iem.sysson

import de.sciss.synth
import synth._
import ugen._

object MySession extends SessionLike {
  def run(): Unit = {
    import Ops._
    val x = play {
      FreeSelf.kr(MouseButton.kr)
      WhiteNoise.ar(SinOsc.ar(MouseX.kr.linExp(0, 1, 1, 1000)) * 0.5)
    }
    println(x)
    x.onEnd { quit() }
  }
}

repo_name: iem-projects/sysson
path: src/test/scala/at/iem/sysson/MySession.scala
language: Scala
license: gpl-3.0
size: 326
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package model

import model.persisted.Phase1TestProfile
import org.joda.time.DateTime

object Phase1TestProfileExamples {

  def psiProfile(implicit now: DateTime) =
    Phase1TestProfile(now, List(Phase1TestExamples.firstPsiTest))
}

repo_name: hmrc/fset-faststream
path: test/model/Phase1TestProfileExamples.scala
language: Scala
license: apache-2.0
size: 835
/*
 * Copyright 2017-2018 Iaroslav Zeigerman
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package akkeeper.master.service

import akka.actor.{ActorRef, ActorSystem, Address, Props, Terminated}
import akka.cluster.ClusterEvent.{MemberRemoved, ReachableMember}
import akka.cluster.{MemberStatus, UniqueAddress}
import akka.testkit.{ImplicitSender, TestKit}
import akkeeper.ActorTestUtils
import akkeeper.api._
import akkeeper.storage.{InstanceStorage, RecordNotFoundException}
import org.scalamock.scalatest.MockFactory
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}

import scala.concurrent.Future
import scala.concurrent.duration._

class MemberAutoDownServiceSpec(system: ActorSystem) extends TestKit(system)
  with FlatSpecLike with Matchers with ImplicitSender with MockFactory
  with ActorTestUtils with BeforeAndAfterAll {

  def this() = this(ActorSystem("MemberAutoDownServiceSpec"))

  override def afterAll(): Unit = {
    system.terminate()
    super.afterAll()
  }

  private def createMemberAutoDownService(targetAddress: UniqueAddress,
                                          targetInstanceId: InstanceId,
                                          instanceStorage: InstanceStorage,
                                          pollInterval: FiniteDuration = 30 seconds): ActorRef = {
    childActorOf(Props(classOf[MemberAutoDownService], targetAddress,
      targetInstanceId, instanceStorage, pollInterval), s"autoDown-$targetInstanceId")
  }

  "A Member Auto Down Service" should "exclude a dead instance from the cluster" in {
    val port = 12345
    val address = UniqueAddress(Address("akka.tcp", "MemberAutoDownServiceSpec", "localhost", port), 1L)
    val instanceId = InstanceId("container")
    val storage = mock[InstanceStorage]
    (storage.getInstance _).expects(instanceId).returns(Future failed RecordNotFoundException(""))

    val service = createMemberAutoDownService(address, instanceId, storage)
    watch(service)
    service ! MemberAutoDownService.PollInstanceStatus
    expectMsgClass(classOf[Terminated])
  }

  it should "periodically poll the instance status" in {
    val port = 12345
    val address = UniqueAddress(Address("akka.tcp", "MemberAutoDownServiceSpec", "localhost", port), 1L)
    val instanceId = InstanceId("container")
    val info = InstanceInfo(instanceId, InstanceUp, "", Set.empty, None, Set.empty)
    val storage = mock[InstanceStorage]
    (storage.getInstance _).expects(instanceId).returns(Future successful info).atLeastTwice()

    val service = createMemberAutoDownService(address, instanceId, storage, 1 second)
    service ! MemberAutoDownService.PollInstanceStatus

    val timeout = 2000
    Thread.sleep(timeout)
    gracefulActorStop(service)
  }

  it should "stop when the target became reachable again" in {
    val port = 12345
    val address = UniqueAddress(Address("akka.tcp", "MemberAutoDownServiceSpec", "localhost", port), 1L)
    val member = createTestMember(address)
    val instanceId = InstanceId("container")
    val info = InstanceInfo(instanceId, InstanceUp, "", Set.empty, None, Set.empty)
    val storage = mock[InstanceStorage]
    (storage.getInstance _).expects(instanceId).returns(Future successful info)

    val service = createMemberAutoDownService(address, instanceId, storage)
    watch(service)
    service ! MemberAutoDownService.PollInstanceStatus
    service ! ReachableMember(member)
    expectMsgClass(classOf[Terminated])
  }

  it should "stop when the target left the cluster" in {
    val port = 12345
    val address = UniqueAddress(Address("akka.tcp", "MemberAutoDownServiceSpec", "localhost", port), 1L)
    val member = createTestMember(address, MemberStatus.Removed)
    val instanceId = InstanceId("container")
    val info = InstanceInfo(instanceId, InstanceUp, "", Set.empty, None, Set.empty)
    val storage = mock[InstanceStorage]
    (storage.getInstance _).expects(instanceId).returns(Future successful info)

    val service = createMemberAutoDownService(address, instanceId, storage)
    watch(service)
    service ! MemberAutoDownService.PollInstanceStatus
    service ! MemberRemoved(member, MemberStatus.exiting)
    expectMsgClass(classOf[Terminated])
  }

  it should "retry on error" in {
    val port = 12345
    val address = UniqueAddress(Address("akka.tcp", "MemberAutoDownServiceSpec", "localhost", port), 1L)
    val instanceId = InstanceId("container")
    val storage = mock[InstanceStorage]
    (storage.getInstance _).expects(instanceId).returns(Future failed new Exception("")).atLeastTwice()

    val service = createMemberAutoDownService(address, instanceId, storage, 1 second)
    service ! MemberAutoDownService.PollInstanceStatus

    val timeout = 2000
    Thread.sleep(timeout)
    gracefulActorStop(service)
  }
}

repo_name: akkeeper-project/akkeeper
path: akkeeper/src/test/scala/akkeeper/master/service/MemberAutoDownServiceSpec.scala
language: Scala
license: apache-2.0
size: 5,309
package i.g.f.s

import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit

import com.twitter.finagle.{Client, Server, Service, ThriftMux}
import com.twitter.util.{Closable, Await, Future}
import org.openjdk.jmh.annotations._

/**
 * run -i 10 -wi 7 -f 2 -t 1 i.g.f.s.RoundTripThriftSmallBenchmark
 */
@State(Scope.Thread)
class RoundTripThriftSmallBenchmark {

  private val smallSize = 20

  val small = thriftscala.Small(
    (for (i <- 1 to smallSize) yield i % 2 == 0).toList,
    "foo bar baz")

  val echo = new thriftscala.EchoService.FutureIface {
    def echo(small: thriftscala.Small) = Future.value(small)
  }

  var s: Closable = _
  var c: thriftscala.EchoService.FutureIface = _

  @Setup
  def setUp(): Unit = {
    s = ThriftMux.serveIface(new InetSocketAddress(8124), echo)
    c = ThriftMux.newIface[thriftscala.EchoService.FutureIface]("localhost:8124")
  }

  @TearDown
  def tearDown(): Unit = {
    Await.ready(s.close())
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def test = Await.result(c.echo(small))
}

repo_name: travisbrown/finagle-serial
path: benchmark/src/main/scala/i.g.f.s/RoundTripThriftSmallBenchmark.scala
language: Scala
license: apache-2.0
size: 1,087
package org.broadinstitute.clio.client

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.stream.{ActorMaterializer, Materializer}
import akka.{Done, NotUsed}
import caseapp.core.help.{Help, WithHelp}
import caseapp.core.parser.Parser
import caseapp.core.{Error, RemainingArgs}
import cats.syntax.either._
import com.typesafe.scalalogging.LazyLogging
import io.circe.Json
import org.broadinstitute.clio.client.commands._
import org.broadinstitute.clio.client.dispatch._
import org.broadinstitute.clio.client.util.IoUtil
import org.broadinstitute.clio.client.webclient.ClioWebClient
import org.broadinstitute.clio.util.auth.ClioCredentials

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
import scala.util.{Failure, Success, Try}

/**
 * The command-line entry point for the clio-client.
 *
 * We want most actual logic to happen inside this
 * object's companion class for maximum testability,
 * so this object should only handle:
 *
 *   1. Setting up and tearing down the actor system
 *      infrastructure needed by the client.
 *
 *   2. Exiting the program with appropriate return
 *      codes.
 */
object ClioClient extends LazyLogging {

  /**
   * Model representing termination of the client before
   * it even gets to running a sub-command.
   *
   * This could be because of an input error, or because
   * the user asked for help / usage.
   *
   * We model the two cases differently so we can exit with
   * different return codes accordingly.
   */
  sealed trait EarlyReturn
  final case class ParsingError(error: Error) extends EarlyReturn
  final case class UsageOrHelpAsked(message: String) extends EarlyReturn

  val progName = "clio-client"

  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem(progName)
    implicit val mat: Materializer = ActorMaterializer()
    import system.dispatcher

    sys.addShutdownHook({ val _ = system.terminate() })

    val baseCreds = Either
      .catchNonFatal(new ClioCredentials(ClioClientConfig.serviceAccountJson))
      .valueOr { err =>
        logger.error("Failed to read credentials", err)
        sys.exit(1)
      }

    def early(source: ClioClient.EarlyReturn): Unit = {
      source match {
        case UsageOrHelpAsked(message) => {
          println(message)
          sys.exit(0)
        }
        case ParsingError(error) => {
          System.err.println(error.message)
          sys.exit(1)
        }
      }
    }

    // 23.seconds is arbitrary.
    // Try increasing it should this ever fail.
    def complete(done: Try[Done]): Unit = {
      val status = done match {
        case Success(_) => 0
        case Failure(ex) =>
          logger.error("Failed to execute command", ex)
          1
      }
      Await.result(system.terminate(), 23.seconds)
      sys.exit(status)
    }

    def otherwise(source: Source[Json, NotUsed]): Unit = {
      source
        .runWith(Sink.ignore)
        .onComplete(complete)
    }

    new ClioClient(ClioWebClient(baseCreds), IoUtil(baseCreds))
      .instanceMain(args)
      .fold(early, otherwise)
  }
}

/**
 * The main clio-client application.
 *
 * Inspired by / partially copy-pasted from the
 * [[caseapp.core.app.CommandAppWithPreCommand]], which demonstrates
 * how to use caseapp's parsers to build a client with
 * subcommands sharing common options. This class
 * reimplements that class instead of extending it because
 * that class is designed to be the top-level entry point of
 * a program, with a main method and calls to sys.exit.
 *
 * Reimplementing the class also lets us tweak its logic so
 * that common options are provided to the sub-command-handling
 * method, rather than kept separate.
 *
 * @param webClient client handling HTTP communication with
 *                  the clio-server
 * @param ioUtil    utility handling all file operations,
 *                  both local and in cloud storage
 */
class ClioClient(webClient: ClioWebClient, ioUtil: IoUtil)(
  implicit ec: ExecutionContext
) {
  import ClioClient.{EarlyReturn, ParsingError, UsageOrHelpAsked}

  /**
   * Common option messages, updated to include our program
   * name and version info.
   *
   * caseapp supports setting these through annotations, but
   * only if the values are constant strings.
   */
  private val beforeCommandHelp = Help(
    appName = "Clio Client",
    appVersion = ClioClientConfig.Version.value,
    progName = ClioClient.progName,
    optionsDesc = "[command] [command-options]",
    args = Seq.empty,
    argsNameOption = None
  )

  /** Names of all valid sub-commands. */
  private val commands: Seq[String] =
    ClioCommand.help.messagesMap.keys.toSeq.sorted

  /** Top-level help message to display on --help. */
  private val helpMessage: String =
    s"""${beforeCommandHelp.help.stripLineEnd}
       |Available commands:
       |${commands.map(commandHelp).mkString("\n\n")}
     """.stripMargin

  /**
   * Top-level usage message to display on --usage,
   * or when users give no sub-command.
   */
  private val usageMessage: String =
    s"""${beforeCommandHelp.usage}
       |Available commands:
       |  ${commands.mkString(", ")}
     """.stripMargin

  /** Help message for a specific command. */
  private def commandHelp(command: String): String = {
    ClioCommand.help
      .messagesMap(command)
      .helpMessage(s"${beforeCommandHelp.progName} [options]", command)
      .trim()
  }

  /** Usage message for a specific command. */
  private def commandUsage(command: String): String = {
    ClioCommand.help
      .messagesMap(command)
      .usageMessage(
        s"${beforeCommandHelp.progName} [options]",
        s"$command [command-options]"
      )
  }

  /**
   * The client's entry point.
   *
   * Note: this can't be named `main` because if it is, the compiler will
   * try to be helpful and generate a forwarder method also named `main`
   * in the companion object that will do nothing but call this method
   * (allowing this class to serve as a top-level entry-point), conflicting
   * with the main method already present in the companion object.
   */
  def instanceMain(
    args: Array[String],
    print: Any => Unit = Predef.print
  ): Either[EarlyReturn, Source[Json, NotUsed]] = {
    val maybeParse =
      ClioCommand.parser.withHelp.detailedParse(args)(Parser[None.type].withHelp)

    wrapError(maybeParse).flatMap {
      case (commonParse, commonArgs, maybeCommandParse) => {
        for {
          _ <- messageIfAsked(helpMessage, commonParse.help)
          _ <- messageIfAsked(usageMessage, commonParse.usage)
          _ <- wrapError(commonParse.baseOrError)
          _ <- checkRemainingArgs(commonArgs)
          response <- maybeCommandParse.map { commandParse =>
            wrapError(commandParse)
              .flatMap(commandParse => commandMain(commandParse, print))
          }.getOrElse(Left(ParsingError(Error.Other(usageMessage))))
        } yield {
          response
        }
      }
    }
  }

  /**
   * Utility for wrapping a parsing error in our type infrastructure,
   * for cleaner use in for-comprehensions.
   */
  private def wrapError[X](either: Either[Error, X]): Either[EarlyReturn, X] = {
    either.leftMap(ParsingError.apply)
  }

  /**
   * Utility for wrapping a help / usage message in our type
   * infrastructure, for cleaner use in for-comprehensions.
   */
  private def messageIfAsked(
    message: String,
    asked: Boolean
  ): Either[EarlyReturn, Unit] = {
    Either.cond(!asked, (), UsageOrHelpAsked(message))
  }

  /**
   * Utility for wrapping a check for extra arguments in our
   * type infrastructure, for cleaner use in for-comprehensions.
   */
  private def checkRemainingArgs(
    remainingArgs: Seq[String]
  ): Either[EarlyReturn, Unit] = {
    Either.cond(
      remainingArgs.isEmpty,
      (),
      ParsingError(
        Error.Other(s"Found extra arguments: ${remainingArgs.mkString(" ")}")
      )
    )
  }

  /**
   * Handle the result of a sub-command parse.
   */
  private def commandMain(
    commandParseWithArgs: (String, WithHelp[ClioCommand], RemainingArgs),
    print: Any => Unit
  ): Either[EarlyReturn, Source[Json, NotUsed]] = {
    val (commandName, commandParse, args) = commandParseWithArgs
    for {
      _ <- messageIfAsked(commandHelp(commandName), commandParse.help)
      _ <- messageIfAsked(commandUsage(commandName), commandParse.usage)
      command <- wrapError(commandParse.baseOrError)
      _ <- checkRemainingArgs(args.remaining)
    } yield {
      val executor = command match {
        case deliverCommand: DeliverCommand[_] => new DeliverExecutor(deliverCommand)
        case undeliverCommand: UndeliverCommand[_] => new UndeliverExecutor(undeliverCommand)
        case addCommand: AddCommand[_] => new AddExecutor(addCommand)
        case moveCommand: MoveCommand[_] => new MoveExecutor(moveCommand)
        case deleteCommand: DeleteCommand[_] => new DeleteExecutor(deleteCommand)
        case patchCommand: PatchCommand[_] => new PatchExecutor(patchCommand)
        case markExternalCommand: MarkExternalCommand[_] =>
          new MarkExternalExecutor(markExternalCommand)
        case retrieveAndPrint: RetrieveAndPrintCommand =>
          new RetrieveAndPrintExecutor(retrieveAndPrint, print)
      }
      executor.execute(webClient, ioUtil)
    }
  }
}

repo_name: broadinstitute/clio
path: clio-client/src/main/scala/org/broadinstitute/clio/client/ClioClient.scala
language: Scala
license: bsd-3-clause
size: 9,509
package no.uio.musit.functional

import scala.annotation.implicitNotFound
import scala.concurrent.{ExecutionContext, Future}

object Implicits {

  /**
   * Implicit converter to wrap a {{{no.uio.musit.functional.Monad}}} around a
   * {{{scala.concurrent.Future}}}. This allows for composition of Monads using
   * Monad transformers.
   *
   * @param ec The ExecutionContext for mapping on the Future type
   * @return a Monad of type Future
   */
  @implicitNotFound(
    "A Future[Monad] needs an implicit ExecutionContext in " +
      "scope. If you are using the Play! Framework, consider using the " +
      "frameworks default context:\n" +
      "import play.api.libs.concurrent.Execution.Implicits.defaultContext\n\n" +
      "Otherwise, use the ExecutionContext that best suits your needs."
  )
  implicit def futureMonad(implicit ec: ExecutionContext) = new Monad[Future] {
    override def map[A, B](value: Future[A])(f: (A) => B) = value.map(f)

    override def flatMap[A, B](value: Future[A])(f: (A) => Future[B]) = value.flatMap(f)

    override def pure[A](x: A): Future[A] = Future(x)
  }
}

repo_name: MUSIT-Norway/musit
path: musit-models/src/main/scala/no/uio/musit/functional/Implicits.scala
language: Scala
license: gpl-2.0
size: 1,116
package collins.controllers.actions

import play.api.mvc.AnyContent
import play.api.mvc.AnyContentAsEmpty
import play.api.mvc.Headers
import play.api.mvc.Request

object ActionHelper {

  val DummyRequest = new Request[AnyContent] {
    def id: Long = 1L
    def tags: Map[String, String] = Map()
    def version: String = "HTTP/1.0"
    val uri = "/"
    val path = "/"
    val method = "GET"
    val queryString = Map.empty[String, Seq[String]]
    val remoteAddress = "127.0.0.1"
    val headers = new Headers {
      protected val data = Seq.empty[(String, Seq[String])]
    }
    val body = AnyContentAsEmpty
    val secure = false
  }

  def createRequest(req: Request[AnyContent], finalMap: Map[String, Seq[String]]) =
    new Request[AnyContent] {
      def id: Long = 1L
      def tags: Map[String, String] = Map()
      def version: String = "HTTP/1.0"
      def uri = req.uri
      def path = req.path
      def method = req.method
      def queryString = finalMap
      def headers = req.headers
      def body = req.body
      def remoteAddress = req.remoteAddress
      val secure = false
    }
}

repo_name: byxorna/collins
path: app/collins/controllers/actions/ActionHelper.scala
language: Scala
license: apache-2.0
size: 1,108
/*
 * Copyright 2015 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.atlas.core.model

import java.math.BigInteger
import java.nio.ByteBuffer
import java.nio.CharBuffer
import java.nio.charset.Charset
import java.nio.charset.CharsetEncoder
import java.security.MessageDigest
import java.util

import com.netflix.atlas.core.util.Hash
import com.netflix.atlas.core.util.InternMap
import com.netflix.atlas.core.util.Interner
import com.netflix.atlas.core.util.SmallHashMap

/**
 * Helper functions for manipulating tagged items.
 */
object TaggedItem {
  import java.util.Comparator

  type Pair = (String, String)

  private val emptyId = Hash.sha1("")
  private val initCapacity = 1000000
  private val idInterner = InternMap.concurrent[BigInteger](initCapacity)
  private val tagsInterner = InternMap.concurrent[Map[String, String]](initCapacity)

  private val keyComparator = new Comparator[Pair] {
    def compare(t1: Pair, t2: Pair): Int = {
      t1._1.compareTo(t2._1)
    }
  }

  private def writePair(p: Pair, buf: ByteBuffer, enc: CharsetEncoder, md: MessageDigest) {
    enc.encode(CharBuffer.wrap(p._1), buf, true)
    buf.flip()
    md.update(buf)
    buf.clear()
    md.update('='.asInstanceOf[Byte])
    enc.encode(CharBuffer.wrap(p._2), buf, true)
    buf.flip()
    md.update(buf)
    buf.clear()
  }

  /**
   * Compute an identifier for a set of tags. The id is a sha1 hash of a normalized string
   * representation. Identical tags will always get the same id.
   */
  def computeId(tags: Map[String, String]): BigInteger = {
    if (tags.isEmpty) emptyId else {
      val pairs = new Array[Pair](tags.size)
      val it = tags.iterator
      var pos = 0
      var maxLength = 0
      while (it.hasNext) {
        val t = it.next()
        pairs(pos) = t
        pos += 1
        maxLength = if (t._1.length > maxLength) t._1.length else maxLength
        maxLength = if (t._2.length > maxLength) t._2.length else maxLength
      }
      util.Arrays.sort(pairs, keyComparator)

      val md = Hash.get("SHA1")
      val enc = Charset.forName("UTF-8").newEncoder
      val buf = ByteBuffer.allocate(maxLength * 2)
      writePair(pairs(0), buf, enc, md)
      pos = 1
      while (pos < pairs.length) {
        md.update(','.asInstanceOf[Byte])
        writePair(pairs(pos), buf, enc, md)
        pos += 1
      }
      new BigInteger(1, md.digest)
    }
  }

  /**
   * Compute the id and return an interned copy of the value. This function should be used if
   * keeping metric data in memory for a long time to avoid redundant big integer objects hanging
   * around.
   */
  def createId(tags: Map[String, String]): BigInteger = {
    val id = computeId(tags)
    idInterner.intern(id)
  }

  def internId(id: BigInteger): BigInteger = {
    idInterner.intern(id)
  }

  def internTags(tags: Map[String, String]): Map[String, String] = {
    val strInterner = Interner.forStrings
    val iter = tags.iterator.map { t =>
      strInterner.intern(t._1) -> strInterner.intern(t._2)
    }
    val smallMap = SmallHashMap(tags.size, iter)
    tagsInterner.intern(smallMap)
  }

  def internTagsShallow(tags: Map[String, String]): Map[String, String] = {
    tagsInterner.intern(tags)
  }

  def retain(keep: Long => Boolean) {
    idInterner.retain(keep)
    tagsInterner.retain(keep)
  }

  /**
   * Compute the new tags for the aggregate buffer. The tags are the intersection of tag values.
   */
  def aggrTags(t1: Map[String, String], t2: Map[String, String]): Map[String, String] = {
    t1.toSet.intersect(t2.toSet).toMap
  }
}

/**
 * Represents an item that can be searched for using a set of tags.
 */
trait TaggedItem {

  /** Unique id based on the tags. */
  def id: BigInteger

  /** Standard string representation of the id. */
  def idString: String = "%040x".format(id)

  /** The tags associated with this item. */
  def tags: Map[String, String]

  /** Returns true if the item is expired and no data is available. */
  def isExpired: Boolean = false

  /**
   * Code that just needs to iterate over all tags should use this method. Allows for
   * implementations to optimize how the tag data is stored and traversed.
   */
  def foreach(f: (String, String) => Unit) {
    tags match {
      case m: SmallHashMap[String, String] => m.foreachItem(f)
      case m: Map[String, String]          => m.foreach { t => f(t._1, t._2) }
    }
  }
}

trait LazyTaggedItem extends TaggedItem {
  lazy val id: BigInteger = TaggedItem.computeId(tags)
}

case class BasicTaggedItem(tags: Map[String, String]) extends LazyTaggedItem

repo_name: rspieldenner/atlas
path: atlas-core/src/main/scala/com/netflix/atlas/core/model/TaggedItem.scala
language: Scala
license: apache-2.0
size: 5,111
package spotlight

import java.util.Collections
import java.util.concurrent.{TimeUnit, AbstractExecutorService}

import scala.concurrent.{ExecutionContextExecutorService, ExecutionContext}

/**
 * Created by rolfsd on 1/23/16.
 */
package object train {

  def executionContextToService(ec: ExecutionContext): ExecutionContextExecutorService = {
    ec match {
      case null => throw null
      case eces: ExecutionContextExecutorService => eces
      case other =>
        new AbstractExecutorService with ExecutionContextExecutorService {
          override def prepare(): ExecutionContext = other
          override def isShutdown = false
          override def isTerminated = false
          override def shutdown() = ()
          override def shutdownNow() = Collections.emptyList[Runnable]
          override def execute(runnable: Runnable): Unit = other execute runnable
          override def reportFailure(t: Throwable): Unit = other reportFailure t
          override def awaitTermination(length: Long, unit: TimeUnit): Boolean = false
        }
    }
  }
}

repo_name: dmrolfs/lineup
path: sandbox/src/main/scala/spotlight/train/package.scala
language: Scala
license: mit
size: 1,051
package flockMortalityExample

import java.nio.file.{Files, Paths}

import com.typesafe.config.ConfigFactory
import play.api.libs.json.Json
import sampler._
import sampler.abc._
import sampler.distribution.Distribution
import sampler.example.abc.flockMortality.util.{Model, _}
import sampler.maths.Random

object Main extends App {
  implicit val r = Random

  // Name and location of output files
  val outDir = Paths.get("results", "flockMortality").toAbsolutePath
  Files.createDirectories(outDir)

  // Observed data
  val observedJsonString =
    """{
      "observed" : [
        {
          "id" : 1,
          "size" : 3000,
          "days" : [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24 ],
          "eggs" : [ 2400, 2400, 2400, 2400, 1561, 1283, 1097, 971, 888, 831, 794, 769, 752, 740, 733, 728, 724, 722, 720, 719, 719, 718, 718, 718, 717 ],
          "dead" : [ 0, 0, 0, 0, 89, 63, 43, 29, 19, 13, 9, 6, 4, 3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
          "infectionFreeDays" : 3
        },
        {
          "id" : 2,
          "size" : 3000,
          "days" : [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24 ],
          "eggs" : [ 2400, 2400, 2400, 2400, 2400, 2400, 2400, 1561, 1283, 1097, 971, 888, 831, 794, 769, 752, 740, 733, 728, 724, 722, 720, 719, 719, 718 ],
          "dead" : [ 0, 0, 0, 0, 0, 0, 0, 89, 63, 43, 29, 19, 13, 9, 6, 4, 3, 2, 1, 0, 0, 0, 0, 0, 0 ],
          "infectionFreeDays" : 6
        }
      ]
    }"""
  val observedJson = Json.parse(observedJsonString)
  val observed = Observed(observedJson)

  /*
   * beta   = transmission rate
   * eta    = 1 / latent period
   * gamma  = 1 / infectious period
   * delta  = mortality rate
   * sigma  = rate of egg production for infectious birds
   * sigma2 = rate of egg production for recovered birds
   * offset = start day of infection
   */

  // Prior
  val priorJsonString =
    """{
      "type" : "interval",
      "params" : {
        "beta" : [ 0.0, 0.5 ],
        "eta" : [ 0.0, 1.0 ],
        "gamma" : [ 0.1, 1.0 ],
        "delta" : [ 0.0, 1.0 ],
        "sigma" : [ 0.1, 0.8 ],
        "sigma2" : [ 0.1, 0.8 ],
        "offset" : [ -5, 15 ]
      }
    }"""
  val priorJson = Json.parse(priorJsonString)
  val prior = IntervalPrior(priorJson)

  // Create an instance of Model based on the observed data and prior
  val model = new Model(observed, prior)

  // Load flock mortality parameters from application.conf
  val abcConfig = ABCConfig(ConfigFactory.load.getConfig("flock-mortality-example"))
  val abcReporting = StandardReport[Parameters](outDir, abcConfig)

  // Use ABC to produce population of parameters
  val population: Population[Parameters] = ABC(model, abcConfig, abcReporting)
  // JSON.writeToFile(outDir.resolve("population.json"), population.toJSON())
  StandardReport.doPlotting(outDir)

  // val result = ABCResult(prior, observed, abcConfig, population)
  // val resultJSON = Json.toJson(result)
  // JSON.writeToFile(resultsJSON, resultJSON)

  //=======================
  // SAMPLE FROM POPULATION TO PRODUCE POSTERIOR
  println("Sample from population to produce posterior: ")
  val numParticles = abcConfig.numGenerations
  val sample: IndexedSeq[Parameters] =
    Distribution.fromWeightsTable(population.consolidatedWeightsTable)
      .until(_.size == 1000 * numParticles)
      .sample
  val posterior: Posterior = Posterior.fromSeq(sample)
  println(posterior.offset.length)

  //=======================
  // POSTERIOR FIT
  println("Observed data: ")
  println("Dead: " + observed.map(_.dead))
  println("Eggs: " + observed.map(_.eggs))

  println("Fitting median parameters: ")
  val medianParams = Posterior.getMarginalMedian(posterior)
  // println(medianParams)
  val modelFit = FittedResult(model.modelDistribution(medianParams).sample)
  println(modelFit.map(_.fitDead))
  println(modelFit.map(_.fitEggs))

  // Convert list of Fitted data to jsons
  val jsonFit = modelFit.map { shed =>
    val json = Json.toJson(shed)
    val filename = s"Shed${shed.id}_fitted.json"
    JSON.writeToFile(outDir.resolve(filename), json)
    json
  }

  println("Fitted parameters: " + medianParams.toSeq)

  // Call PlotResult.scala if you want to see the output - the ABC will need to
  // have finished and generated result.json
}

repo_name: tearne/Sampler
path: sampler-examples/src/main/scala/sampler/example/abc/flockMortality/Main.scala
language: Scala
license: apache-2.0
size: 4,254
/*
 * Copyright 2011 Delving B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package controllers.dos.ui

import play.mvc.Controller
import java.io.File
import play.mvc.results.Result
import controllers.dos.{FileUpload, ImageDisplay}

/**
 *
 * @author Manuel Bernhardt <[email protected]>
 */
object MCP extends Controller {

  def index() = Template

  def browse(path: String): Result = {
    val f = new File(path)
    if (!f.exists()) return Error("Directory '%s' does not exist".format(path))
    if (!f.isDirectory) return Error("Trying to browse a file: " + path)
    val files = if (f.listFiles == null) {
      List()
    } else {
      f.listFiles.map(f => BrowserFile(
        path = f.getAbsolutePath,
        name = f.getName,
        isDir = f.isDirectory,
        contentType = play.libs.MimeTypes.getContentType(f.getName)
      )).sortBy(!_.isDir)
    }
    Template("/dos/ui/MCP/index.html", 'files -> files)
  }
}

case class BrowserFile(path: String,
                       name: String,
                       isDir: Boolean,
                       contentType: String) {

  def isImage = contentType.contains("image")

  def id = if (name.contains(".") && !name.startsWith(".")) name.split("\\.")(0) else name
}

repo_name: delving/dos
path: app/controllers/dos/ui/MCP.scala
language: Scala
license: apache-2.0
size: 1,761
/* Copyright (C) 2008-2010 Univ of Massachusetts Amherst, Computer Science Dept
   This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
   http://factorie.cs.umass.edu, http://code.google.com/p/factorie/
   This software is provided under the terms of the Eclipse Public License 1.0
   as published by http://www.opensource.org.  For further information,
   see the file `LICENSE.txt' included with this distribution. */

package cc.factorie

import scala.reflect.Manifest
import scala.collection.mutable.{ArrayBuffer, HashMap}

/** A sampler that has a Manifest for its context type.
    Samplers are implicitly converted to these before being added to a SamplerSuite.
    The Manifest is necessary for type checking the AnyRef arguments to 'process'.
    @author Andrew McCallum */
class GenericSampler[C](val sampler: Sampler[C])(implicit mc: Manifest[C]) extends Sampler[C] {
  //println("GenericSampler m="+mc)
  val contextClass = mc.erasure
  val contextManifest = mc

  /** If argument is the right type, then call process method. */
  val contextClassCache = new HashMap[Class[_], Boolean]
  def compatible(c: Class[_]): Boolean = {
    //mc.erasure.isAssignableFrom(c) // This takes 44.4 seconds for LDADemo
    contextClassCache.getOrElseUpdate(c, contextManifest >:> Manifest.classType(c)) // This takes 42.8 seconds for LDADemo
    // No caching Manifest comparison with >:> took 468 seconds.  Wow!
  }
  def process0[T <: AnyRef](context: T): DiffList =
    if (compatible(context.getClass)) {
      val c: C = context.asInstanceOf[C] // TODO How slow is this check?
      //|**("GenericSampler.process")
      val d = process(c)
      //**|
      d
    } else null
  def process1(context: C) = sampler.process1(context)
}

/** A collection of samplers that might play beautiful music together.
    You can call this.process passing AnyRef, and the suite will offer each sampler
    (in order) the opportunity to handle this context. The first sampler in the
    suite to accept it, gets it. It is envisioned that the individual samplers in
    the suite may send variables back to the suite or coordinate among the suite.
    @author Andrew McCallum */
class SamplerSuite extends ArrayBuffer[GenericSampler[_]] with Sampler[AnyRef] with cc.factorie.util.Trackable {
  /*def this() = this(Nil)
  def this(ss:Sampler[_]*) = this(ss)
  this ++= ss*/

  def process1(context: AnyRef): DiffList = {
    val samplers = this.elements
    while (samplers.hasNext) {
      //|**("SamplerSuite")
      val sampler = samplers.next
      //println("SamplerSuite context "+context+" sampler "+sampler.sampler)
      val d: DiffList = sampler.process0(context)
      //**|
      if (d != null) {
        //println("SamplerSuite sampler "+sampler.sampler+" diff "+d)
        return d
      }
    }
    return null
  }

  override def noDiffList: this.type = {
    this.foreach(_.sampler.noDiffList)
    super.noDiffList
  }
}

repo_name: andrewmilkowski/factorie
path: src/main/scala/cc/factorie/SamplerSuite.scala
language: Scala
license: epl-1.0
size: 2,952
package com.artclod.mathml

import com.artclod.mathml.scalar._
import com.artclod.mathml.scalar.apply._
import com.artclod.mathml.scalar.apply.trig._
import com.artclod.mathml.scalar.concept.Constant

import scala.util._
import scala.xml._

object MathML {

  val h = <hack/> // This is a hack so we can get default XML meta data for default MathML objects

  def apply(text: String): Try[MathMLElem] = Try(xml.XML.loadString(text)).flatMap(apply(_))

  def apply(xml: Elem): Try[MathMLElem] = {
    xml.label.toLowerCase match {
      case "math" => Try(Math(xml.prefix, xml.attributes, xml.scope, xml.minimizeEmpty, MathML(xml.childElem(0)).get))
      case "apply" => applyElement(xml)
      case "cn" => constantElement(xml)
      case "ci" => Success(Ci(xml.child(0).text)) // LATER child(0).text could be nonsense
      case "exponentiale" => Success(ExponentialE)
      case "pi" => Success(com.artclod.mathml.scalar.Pi)
      case "logbase" => Cn(xml.childElem(0)).map(Logbase(_)) // LATER need to handle special Constants, xml.childElem(0) could fail
      case "degree" => Cn(xml.childElem(0)).map(Degree(_)) // LATER need to handle special Constants, xml.childElem(0) could fail
      case "mfenced" => MathML(xml.childElem(0)).map(Mfenced(_))
      case _ => Failure(new IllegalArgumentException(xml + " was not recognized as a MathML element"))
    }
  }

  private def constantElement(xml: Elem): Try[Constant] = Cn(xml.child(0))

  private def applyElement(xml: Elem): Try[MathMLElem] = {
    if (xml.childElem.size < 2) {
      Failure(new IllegalArgumentException("Apply MathML Elements must have at least two children " + xml))
    } else {
      val apply = xml
      val operator = xml.childElem(0)
      val argumentsTry = xml.childElem.drop(1).map(MathML(_))
      val failure = argumentsTry.find(_.isFailure)
      if (failure.nonEmpty) failure.get
      else applyElementCreate(apply, operator, argumentsTry.map(_.get))
    }
  }

  private def applyElementCreate(a: Elem, o: Elem, args: Seq[MathMLElem]): Try[MathMLElem] = {
    (o.label.toLowerCase(), args) match {
      case ("plus", _) => Success(ApplyPlus(args: _*))
      case ("minus", Seq(v)) => Success(ApplyMinusU(v))
      case ("minus", Seq(v1, v2)) => Success(ApplyMinusB(v1, v2))
      case ("times", _) => Success(ApplyTimes(args: _*))
      case ("divide", Seq(num, den)) => Success(ApplyDivide(num, den))
      case ("power", Seq(base, exp)) => Success(ApplyPower(base, exp))
      case ("ln", Seq(value)) => Success(ApplyLn(value))
      case ("log", Seq(value)) => Success(ApplyLog10(value))
      case ("log", Seq(b: Logbase, value)) => Success(ApplyLog(b.v, value))
      case ("sin", Seq(v)) => Success(ApplySin(v))
      case ("cos", Seq(v)) => Success(ApplyCos(v))
      case ("tan", Seq(v)) => Success(ApplyTan(v))
      case ("sec", Seq(v)) => Success(ApplySec(v))
      case ("csc", Seq(v)) => Success(ApplyCsc(v))
      case ("cot", Seq(v)) => Success(ApplyCot(v))
      case ("root", Seq(value)) => Success(ApplySqrt(value))
      case ("root", Seq(d: Degree, value)) => Success(ApplyRoot(d.v, value))
      case (a, c) => Failure(new IllegalArgumentException(o + " was not recognized as an applyable MathML element (label [" + o.label + "] might not be recognized or wrong number of child elements [" + c.length + "])"))
    }
  }

  private implicit class PimpedElem(e: Elem) {
    def childElem: Seq[Elem] = e.child.collect(_ match { case x: Elem => x })
  }
}

repo_name: kristiankime/calc-tutor
path: app/com/artclod/mathml/MathML.scala
language: Scala
license: mit
size: 3,339
package com.twitter.util.validation.internal.validators

import com.twitter.util.validation.constraints.CountryCode
import com.twitter.util.validation.constraintvalidation.TwitterConstraintValidatorContext
import jakarta.validation.{ConstraintValidator, ConstraintValidatorContext}
import java.util.Locale

private object ISO3166CountryCodeConstraintValidator {

  /** @see [[https://www.iso.org/iso-3166-country-codes.html ISO 3166]] */
  val CountryCodes: Set[String] = Locale.getISOCountries.toSet
}

private[validation] class ISO3166CountryCodeConstraintValidator
    extends ConstraintValidator[CountryCode, Any] {
  import ISO3166CountryCodeConstraintValidator._

  @volatile private[this] var countryCode: CountryCode = _

  override def initialize(constraintAnnotation: CountryCode): Unit = {
    this.countryCode = constraintAnnotation
  }

  override def isValid(
    obj: Any,
    constraintValidatorContext: ConstraintValidatorContext
  ): Boolean = obj match {
    case null => true
    case typedValue: Array[Any] =>
      validationResult(typedValue, constraintValidatorContext)
    case typedValue: Iterable[Any] =>
      validationResult(typedValue, constraintValidatorContext)
    case anyValue =>
      validationResult(Seq(anyValue.toString), constraintValidatorContext)
  }

  /* Private */

  private[this] def validationResult(
    value: Iterable[Any],
    constraintValidatorContext: ConstraintValidatorContext
  ): Boolean = {
    val invalidCountryCodes = findInvalidCountryCodes(value)
    // an empty value is not a valid country code
    val valid = if (value.isEmpty) false else invalidCountryCodes.isEmpty
    if (!valid) {
      TwitterConstraintValidatorContext
        .addExpressionVariable("validatedValue", mkString(value))
        .withMessageTemplate(countryCode.message())
        .addConstraintViolation(constraintValidatorContext)
    }
    valid
  }

  private[this] def findInvalidCountryCodes(values: Iterable[Any]): Set[String] = {
    val uppercaseCountryCodes = values.toSet.map { value: Any =>
      value.toString.toUpperCase
    }
    uppercaseCountryCodes.diff(CountryCodes)
  }
}

repo_name: twitter/util
path: util-validator/src/main/scala/com/twitter/util/validation/internal/validators/ISO3166CountryCodeConstraintValidator.scala
language: Scala
license: apache-2.0
size: 2,136
package at.forsyte.apalache.tla.bmcmt.analyses

import at.forsyte.apalache.tla.lir.UID
import com.google.inject.Singleton

import scala.collection.mutable

@Singleton
class ExprGradeStoreImpl extends ExprGradeStore with Serializable {
  var store: mutable.Map[UID, ExprGrade.Value] = mutable.HashMap[UID, ExprGrade.Value]()

  override def get(uid: UID): Option[ExprGrade.Value] = {
    store.get(uid)
  }
}

repo_name: konnov/apalache
path: tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/analyses/ExprGradeStoreImpl.scala
language: Scala
license: apache-2.0
size: 408
package org.scalatest.events

import org.scalatest.junit.JUnit3Suite
import org.scalatest._

class LocationSuiteProp extends SuiteProp {

  test("All suite types should have correct location in SuiteStarting, SuiteCompleted, SuiteAborted and TestFailed event.") {
    forAll(examples) { suite =>
      val reporter = new EventRecordingReporter
      suite.run(None, Args(reporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty))
      val eventList = reporter.eventsReceived
      eventList.foreach { event => suite.checkFun(event) }
      suite.allChecked
    }
  }

  type FixtureServices = TestLocationServices

  def suite = new TestLocationSuite
  class TestLocationSuite extends Suite with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationSuite"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(TopOfMethodPair(suiteTypeName + "$NestedSuite", "public void " + suiteTypeName + "$NestedSuite.testInfo(org.scalatest.Informer)"))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("testFail"))
    val expectedInfoProvidedList = List(LineInFilePair("testInfo", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends Suite {
      def testInfo(info: Informer) {
        info("testInfo")
      }
    }
    class AbortNestedSuite extends Suite {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends Suite {
      def testFail() { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def fixtureSuite = new TestLocationFixtureSuite
  class StringFixtureSuite extends fixture.Suite with StringFixture
  class TestLocationFixtureSuite extends StringFixtureSuite with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureSuite"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(TopOfMethodPair(suiteTypeName + "$NestedSuite", "public void " + suiteTypeName + "$NestedSuite.testInfo(org.scalatest.Informer)"))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("testFail"))
    val expectedInfoProvidedList = List(LineInFilePair("testInfo", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends StringFixtureSuite {
      def testInfo(info: Informer) {
        info("testInfo")
      }
    }
    class AbortNestedSuite extends StringFixtureSuite {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends StringFixtureSuite {
      def testFail() { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def junit3Suite = new TestLocationJUnit3Suite

  def junitSuite = new TestLocationJUnitSuite

  def testngSuite = new TestLocationTestNGSuite

  def funSuite = new TestLocationFunSuite
  class TestLocationFunSuite extends FunSuite with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFunSuite"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends FunSuite {
      test("info") {
        info("test info")
      }
    }
    class AbortNestedSuite extends FunSuite {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends FunSuite {
      test("fail") { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def fixtureFunSuite = new TestLocationFixtureFunSuite
  class TestLocationFixtureFunSuite extends StringFixtureFunSuite with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureFunSuite"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends StringFixtureFunSuite {
      test("info") { param =>
        info("test info")
      }
    }
    class AbortNestedSuite extends StringFixtureFunSuite {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends StringFixtureFunSuite {
      test("fail") { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def funSpec = new LocationTestSpec
  class LocationTestSpec extends FunSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$LocationTestSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends FunSpec {
      it("info") {
        info("test info")
      }
    }
    class AbortNestedSuite extends FunSpec {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends FunSpec {
      it("fail") { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def fixtureSpec = new TestLocationFixtureSpec
  class TestLocationFixtureSpec extends StringFixtureFunSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends StringFixtureFunSpec {
      it("info") { param =>
        info("test info")
      }
    }
    class AbortNestedSuite extends StringFixtureFunSpec {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends FunSpec {
      it("fail") { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def featureSpec = new TestLocationFeatureSpec
  class TestLocationFeatureSpec extends FeatureSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFeatureSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("Feature: feature Scenario: info", "LocationSuiteProp.scala", thisLineNumber + 6))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("Feature: feature Scenario: fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5))

    class NestedSuite extends FeatureSpec {
      feature("feature") {
        scenario("info") {
          info("test info")
        }
      }
    }
    class AbortNestedSuite extends FeatureSpec {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends FeatureSpec {
      feature("feature") {
        scenario("fail") { fail }
      }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def fixtureFeatureSpec = new TestLocationFixtureFeatureSpec
  class StringFixtureFeatureSpec extends fixture.FeatureSpec with StringFixture
  class TestLocationFixtureFeatureSpec extends StringFixtureFeatureSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureFeatureSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("Feature: feature Scenario: info", "LocationSuiteProp.scala", thisLineNumber + 6))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("Feature: feature Scenario: fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5))

    class NestedSuite extends StringFixtureFeatureSpec {
      feature("feature") {
        scenario("info") { param =>
          info("test info")
        }
      }
    }
    class AbortNestedSuite extends StringFixtureFeatureSpec {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends StringFixtureFeatureSpec {
      feature("feature") {
        scenario("fail") { fail }
      }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def flatSpec = new TestLocationFlatSpec
  class TestLocationFlatSpec extends FlatSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFlatSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))

    class NestedSuite extends FlatSpec {
      "Test" should "info" in {
        info("test info")
      }
    }
    class AbortNestedSuite extends FlatSpec {
      override protected def runNestedSuites(args: Args): Status = {
        throw new RuntimeException
      }
    }
    class FailNestedSuite extends FlatSpec {
      "Test" should "fail" in { fail }
    }
    override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite)
  }

  def fixtureFlatSpec = new TestLocationFixtureFlatSpec
  class StringFixtureFlatSpec extends fixture.FlatSpec with StringFixture
  class TestLocationFixtureFlatSpec extends StringFixtureFlatSpec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureFlatSpec"
    val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite"))
    val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite"))
    val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 5))
    val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail"))
    val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4))
    class
NestedSuite extends StringFixtureFlatSpec { "Test" should "info" in { param => info("test info") } } class AbortNestedSuite extends StringFixtureFlatSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends StringFixtureFlatSpec { "Test" should "fail" in { param => fail } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def freeSpec = new TestLocationFreeSpec class TestLocationFreeSpec extends FreeSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFreeSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 6)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5)) class NestedSuite extends FreeSpec { "Test" - { "should info" in { info("test info") } } } class AbortNestedSuite extends FreeSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends FreeSpec { "Test" - { "should fail" in { fail } } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def fixtureFreeSpec = new TestLocationFixtureFreeSpec class StringFixtureFreeSpec extends fixture.FreeSpec with StringFixture class TestLocationFixtureFreeSpec extends StringFixtureFreeSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureFreeSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 6)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5)) class NestedSuite extends StringFixtureFreeSpec { "Test" - { "should info" in { param => info("test info") } } } class AbortNestedSuite extends StringFixtureFreeSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends StringFixtureFreeSpec { "Test" - { "should fail" in { param => fail } } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def propSpec = new TestLocationPropSpec class TestLocationPropSpec extends PropSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationPropSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + 
"$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 5)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4)) class NestedSuite extends PropSpec { property("Test should info") { info("test info") } } class AbortNestedSuite extends PropSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends PropSpec { property("Test should fail") { fail } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def fixturePropSpec = new TestLocationFixturePropSpec class StringFixturePropSpec extends fixture.PropSpec with StringFixture class TestLocationFixturePropSpec extends StringFixturePropSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixturePropSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 5)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 4)) class NestedSuite extends StringFixturePropSpec { property("Test should info") { param => info("test info") } } class AbortNestedSuite extends StringFixturePropSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends StringFixturePropSpec { property("Test should fail") { param => fail } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def wordSpec = new TestLocationWordSpec class TestLocationWordSpec extends WordSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationWordSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 6)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5)) class NestedSuite extends WordSpec { 
"Test" should { "info" in { info("test info") } } } class AbortNestedSuite extends WordSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends WordSpec { "Test" should { "fail" in { fail } } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } def fixtureWordSpec = new TestLocationFixtureWordSpec class StringFixtureWordSpec extends fixture.WordSpec with StringFixture class TestLocationFixtureWordSpec extends StringFixtureWordSpec with FixtureServices { val suiteTypeName = "org.scalatest.events.LocationSuiteProp$TestLocationFixtureWordSpec" val expectedSuiteStartingList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$AbortNestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteCompletedList = List(TopOfClassPair(suiteTypeName + "$NestedSuite"), TopOfClassPair(suiteTypeName + "$FailNestedSuite")) val expectedSuiteAbortedList = List(SeeStackDepthExceptionPair(suiteTypeName + "$AbortNestedSuite")) val expectedTestSucceededList = List(LineInFilePair("Test should info", "LocationSuiteProp.scala", thisLineNumber + 6)) val expectedTestFailedList = List(SeeStackDepthExceptionPair("Test should fail")) val expectedInfoProvidedList = List(LineInFilePair("test info", "LocationSuiteProp.scala", thisLineNumber + 5)) class NestedSuite extends StringFixtureWordSpec { "Test" should { "info" in { param => info("test info") } } } class AbortNestedSuite extends StringFixtureWordSpec { override protected def runNestedSuites(args: Args): Status = { throw new RuntimeException } } class FailNestedSuite extends StringFixtureWordSpec { "Test" should { "fail" in { param => fail } } } override def nestedSuites = Vector(new NestedSuite, new AbortNestedSuite, new FailNestedSuite) } }
svn2github/scalatest
src/test/scala/org/scalatest/events/LocationSuiteProp.scala
Scala
apache-2.0
27,445
/*
 * Copyright 2013 Commonwealth Computer Research, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package geomesa.core.iterators

import SpatioTemporalIntersectingIterator._
import org.apache.accumulo.core.data.{Key, Value}
import org.apache.hadoop.io.Text

trait ColumnQualifierAggregator {
  def collect(key: Key, value: Value)
  def reset()
  def aggregate(): Value
}

class AttributeAggregator extends ColumnQualifierAggregator {
  var attrs = collection.mutable.ListBuffer[Attribute]()

  def reset() {
    attrs.clear()
  }

  def collect(key: Key, value: Value) {
    // the key is not used in this version; the value is a composite that
    // includes the attribute name as well as the attribute value
    attrs += decodeAttributeValue(value)
  }

  def aggregate() = {
    AttributeAggregator.encode(attrs)
  }
}

object AttributeAggregator {
  val SIMPLE_FEATURE_ATTRIBUTE_NAME = "SimpleFeatureAttribute"
  lazy val SIMPLE_FEATURE_ATTRIBUTE_NAME_TEXT =
    new Text(SIMPLE_FEATURE_ATTRIBUTE_NAME)

  def encode(attrs: Seq[Attribute]): Value = {
    // it should not be possible to have more than one attribute per feature
    // (until we re-decompose them in a subsequent round of refactoring)
    assert(attrs.size == 1)
    new Value(attrs.head.value.getBytes)
  }

  def decode(v: Value): Map[String,String] = {
    Map[String,String](SIMPLE_FEATURE_ATTRIBUTE_NAME -> v.toString)
  }
}
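// A minimal usage sketch showing how an iterator stack might drive a
// ColumnQualifierAggregator over the entries of a single feature. The
// `entriesForFeature` iterator is a hypothetical stand-in for whatever
// supplies the (Key, Value) pairs in the real SpatioTemporal iterator.
object AttributeAggregatorSketch {
  def combine(entriesForFeature: Iterator[(Key, Value)]): Value = {
    val agg = new AttributeAggregator
    agg.reset()
    entriesForFeature.foreach { case (k, v) => agg.collect(k, v) }
    agg.aggregate()
  }
}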
anthonyccri/geomesa
geomesa-core/src/main/scala/geomesa/core/iterators/ColumnQualifierAggregator.scala
Scala
apache-2.0
1,922
/*
 * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
 */
package scalaguide.advanced.filters.essential

// #essential-filter-example
import javax.inject.Inject
import akka.util.ByteString
import play.api.Logger
import play.api.libs.streams.Accumulator
import play.api.mvc._
import scala.concurrent.ExecutionContext

class LoggingFilter @Inject() (implicit ec: ExecutionContext) extends EssentialFilter {
  def apply(nextFilter: EssentialAction) = new EssentialAction {
    def apply(requestHeader: RequestHeader) = {

      val startTime = System.currentTimeMillis

      val accumulator: Accumulator[ByteString, Result] = nextFilter(requestHeader)

      accumulator.map { result =>
        val endTime = System.currentTimeMillis
        val requestTime = endTime - startTime

        Logger.info(s"${requestHeader.method} ${requestHeader.uri} took ${requestTime}ms and returned ${result.header.status}")

        result.withHeaders("Request-Time" -> requestTime.toString)
      }
    }
  }
}
// #essential-filter-example
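// A minimal wiring sketch, assuming Play's standard `DefaultHttpFilters`
// mechanism; the `Filters` class name below is illustrative, not part of
// the documented example above.
class Filters @Inject() (loggingFilter: LoggingFilter)
  extends play.api.http.DefaultHttpFilters(loggingFilter)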
wsargent/playframework
documentation/manual/working/scalaGuide/main/application/code/EssentialFilter.scala
Scala
apache-2.0
1,044
package drt.client.components import japgolly.scalajs.react.ReactNode import japgolly.scalajs.react.vdom.prefix_<^._ /** * Provides type-safe access to Font Awesome icons */ object Icon { type Icon = ReactNode def apply(name: String): Icon = <.i(^.className := s"fa fa-$name") def adjust = apply("adjust") def adn = apply("adn") def alignCenter = apply("align-center") def alignJustify = apply("align-justify") def alignLeft = apply("align-left") def alignRight = apply("align-right") def ambulance = apply("ambulance") def anchor = apply("anchor") def android = apply("android") def angellist = apply("angellist") def angleDoubleDown = apply("angle-double-down") def angleDoubleLeft = apply("angle-double-left") def angleDoubleRight = apply("angle-double-right") def angleDoubleUp = apply("angle-double-up") def angleDown = apply("angle-down") def angleLeft = apply("angle-left") def angleRight = apply("angle-right") def angleUp = apply("angle-up") def apple = apply("apple") def archive = apply("archive") def areaChart = apply("area-chart") def arrowCircleDown = apply("arrow-circle-down") def arrowCircleLeft = apply("arrow-circle-left") def arrowCircleODown = apply("arrow-circle-o-down") def arrowCircleOLeft = apply("arrow-circle-o-left") def arrowCircleORight = apply("arrow-circle-o-right") def arrowCircleOUp = apply("arrow-circle-o-up") def arrowCircleRight = apply("arrow-circle-right") def arrowCircleUp = apply("arrow-circle-up") def arrowDown = apply("arrow-down") def arrowLeft = apply("arrow-left") def arrowRight = apply("arrow-right") def arrowUp = apply("arrow-up") def arrows = apply("arrows") def arrowsAlt = apply("arrows-alt") def arrowsH = apply("arrows-h") def arrowsV = apply("arrows-v") def asterisk = apply("asterisk") def at = apply("at") def automobile = apply("automobile") def backward = apply("backward") def ban = apply("ban") def bank = apply("bank") def barChart = apply("bar-chart") def barChartO = apply("bar-chart-o") def barcode = apply("barcode") def bars = apply("bars") def bed = apply("bed") def beer = apply("beer") def behance = apply("behance") def behanceSquare = apply("behance-square") def bell = apply("bell") def bellO = apply("bell-o") def bellSlash = apply("bell-slash") def bellSlashO = apply("bell-slash-o") def bicycle = apply("bicycle") def binoculars = apply("binoculars") def birthdayCake = apply("birthday-cake") def bitbucket = apply("bitbucket") def bitbucketSquare = apply("bitbucket-square") def bitcoin = apply("bitcoin") def bold = apply("bold") def bolt = apply("bolt") def bomb = apply("bomb") def book = apply("book") def bookmark = apply("bookmark") def bookmarkO = apply("bookmark-o") def briefcase = apply("briefcase") def btc = apply("btc") def bug = apply("bug") def building = apply("building") def buildingO = apply("building-o") def bullhorn = apply("bullhorn") def bullseye = apply("bullseye") def bus = apply("bus") def buysellads = apply("buysellads") def cab = apply("cab") def calculator = apply("calculator") def calendar = apply("calendar") def calendarO = apply("calendar-o") def camera = apply("camera") def cameraRetro = apply("camera-retro") def car = apply("car") def caretDown = apply("caret-down") def caretLeft = apply("caret-left") def caretRight = apply("caret-right") def caretSquareODown = apply("caret-square-o-down") def caretSquareOLeft = apply("caret-square-o-left") def caretSquareORight = apply("caret-square-o-right") def caretSquareOUp = apply("caret-square-o-up") def caretUp = apply("caret-up") def cartArrowDown = apply("cart-arrow-down") def 
cartPlus = apply("cart-plus") def cc = apply("cc") def ccAmex = apply("cc-amex") def ccDiscover = apply("cc-discover") def ccMastercard = apply("cc-mastercard") def ccPaypal = apply("cc-paypal") def ccStripe = apply("cc-stripe") def ccVisa = apply("cc-visa") def certificate = apply("certificate") def chain = apply("chain") def chainBroken = apply("chain-broken") def check = apply("check") def checkCircle = apply("check-circle") def checkCircleO = apply("check-circle-o") def checkSquare = apply("check-square") def checkSquareO = apply("check-square-o") def chevronCircleDown = apply("chevron-circle-down") def chevronCircleLeft = apply("chevron-circle-left") def chevronCircleRight = apply("chevron-circle-right") def chevronCircleUp = apply("chevron-circle-up") def chevronDown = apply("chevron-down") def chevronLeft = apply("chevron-left") def chevronRight = apply("chevron-right") def chevronUp = apply("chevron-up") def child = apply("child") def circle = apply("circle") def circleO = apply("circle-o") def circleONotch = apply("circle-o-notch") def circleThin = apply("circle-thin") def clipboard = apply("clipboard") def clockO = apply("clock-o") def close = apply("close") def cloud = apply("cloud") def cloudDownload = apply("cloud-download") def cloudUpload = apply("cloud-upload") def cny = apply("cny") def code = apply("code") def codeFork = apply("code-fork") def codepen = apply("codepen") def coffee = apply("coffee") def cog = apply("cog") def cogs = apply("cogs") def columns = apply("columns") def comment = apply("comment") def commentO = apply("comment-o") def comments = apply("comments") def commentsO = apply("comments-o") def compass = apply("compass") def compress = apply("compress") def connectdevelop = apply("connectdevelop") def copy = apply("copy") def copyright = apply("copyright") def creditCard = apply("credit-card") def crop = apply("crop") def crosshairs = apply("crosshairs") def css3 = apply("css3") def cube = apply("cube") def cubes = apply("cubes") def cut = apply("cut") def cutlery = apply("cutlery") def dashboard = apply("dashboard") def dashcube = apply("dashcube") def database = apply("database") def dedent = apply("dedent") def delicious = apply("delicious") def desktop = apply("desktop") def deviantart = apply("deviantart") def diamond = apply("diamond") def digg = apply("digg") def dollar = apply("dollar") def dotCircleO = apply("dot-circle-o") def download = apply("download") def dribbble = apply("dribbble") def dropbox = apply("dropbox") def drupal = apply("drupal") def edit = apply("edit") def eject = apply("eject") def ellipsisH = apply("ellipsis-h") def ellipsisV = apply("ellipsis-v") def empire = apply("empire") def envelope = apply("envelope") def envelopeO = apply("envelope-o") def envelopeSquare = apply("envelope-square") def eraser = apply("eraser") def eur = apply("eur") def euro = apply("euro") def exchange = apply("exchange") def exclamation = apply("exclamation") def exclamationCircle = apply("exclamation-circle") def exclamationTriangle = apply("exclamation-triangle") def expand = apply("expand") def externalLink = apply("external-link") def externalLinkSquare = apply("external-link-square") def eye = apply("eye") def eyeSlash = apply("eye-slash") def eyedropper = apply("eyedropper") def facebook = apply("facebook") def facebookF = apply("facebook-f") def facebookOfficial = apply("facebook-official") def facebookSquare = apply("facebook-square") def fastBackward = apply("fast-backward") def fastForward = apply("fast-forward") def fax = apply("fax") def 
female = apply("female") def fighterJet = apply("fighter-jet") def file = apply("file") def fileArchiveO = apply("file-archive-o") def fileAudioO = apply("file-audio-o") def fileCodeO = apply("file-code-o") def fileExcelO = apply("file-excel-o") def fileImageO = apply("file-image-o") def fileMovieO = apply("file-movie-o") def fileO = apply("file-o") def filePdfO = apply("file-pdf-o") def filePhotoO = apply("file-photo-o") def filePictureO = apply("file-picture-o") def filePowerpointO = apply("file-powerpoint-o") def fileSoundO = apply("file-sound-o") def fileText = apply("file-text") def fileTextO = apply("file-text-o") def fileVideoO = apply("file-video-o") def fileWordO = apply("file-word-o") def fileZipO = apply("file-zip-o") def filesO = apply("files-o") def film = apply("film") def filter = apply("filter") def fire = apply("fire") def fireExtinguisher = apply("fire-extinguisher") def flag = apply("flag") def flagCheckered = apply("flag-checkered") def flagO = apply("flag-o") def flash = apply("flash") def flask = apply("flask") def flickr = apply("flickr") def floppyO = apply("floppy-o") def folder = apply("folder") def folderO = apply("folder-o") def folderOpen = apply("folder-open") def folderOpenO = apply("folder-open-o") def font = apply("font") def forumbee = apply("forumbee") def forward = apply("forward") def foursquare = apply("foursquare") def frownO = apply("frown-o") def futbolO = apply("futbol-o") def gamepad = apply("gamepad") def gavel = apply("gavel") def gbp = apply("gbp") def ge = apply("ge") def gear = apply("gear") def gears = apply("gears") def genderless = apply("genderless") def gift = apply("gift") def git = apply("git") def gitSquare = apply("git-square") def github = apply("github") def githubAlt = apply("github-alt") def githubSquare = apply("github-square") def gittip = apply("gittip") def glass = apply("glass") def globe = apply("globe") def google = apply("google") def googlePlus = apply("google-plus") def googlePlusSquare = apply("google-plus-square") def googleWallet = apply("google-wallet") def graduationCap = apply("graduation-cap") def gratipay = apply("gratipay") def group = apply("group") def hSquare = apply("h-square") def hackerNews = apply("hacker-news") def handODown = apply("hand-o-down") def handOLeft = apply("hand-o-left") def handORight = apply("hand-o-right") def handOUp = apply("hand-o-up") def hddO = apply("hdd-o") def header = apply("header") def headphones = apply("headphones") def heart = apply("heart") def heartO = apply("heart-o") def heartbeat = apply("heartbeat") def history = apply("history") def home = apply("home") def hospitalO = apply("hospital-o") def hotel = apply("hotel") def html5 = apply("html5") def ils = apply("ils") def image = apply("image") def inbox = apply("inbox") def indent = apply("indent") def info = apply("info") def infoCircle = apply("info-circle") def inr = apply("inr") def instagram = apply("instagram") def institution = apply("institution") def ioxhost = apply("ioxhost") def italic = apply("italic") def joomla = apply("joomla") def jpy = apply("jpy") def jsfiddle = apply("jsfiddle") def key = apply("key") def keyboardO = apply("keyboard-o") def krw = apply("krw") def language = apply("language") def laptop = apply("laptop") def lastfm = apply("lastfm") def lastfmSquare = apply("lastfm-square") def leaf = apply("leaf") def leanpub = apply("leanpub") def legal = apply("legal") def lemonO = apply("lemon-o") def levelDown = apply("level-down") def levelUp = apply("level-up") def lifeBouy = apply("life-bouy") 
def lifeBuoy = apply("life-buoy") def lifeRing = apply("life-ring") def lifeSaver = apply("life-saver") def lightbulbO = apply("lightbulb-o") def lineChart = apply("line-chart") def link = apply("link") def linkedin = apply("linkedin") def linkedinSquare = apply("linkedin-square") def linux = apply("linux") def list = apply("list") def listAlt = apply("list-alt") def listOl = apply("list-ol") def listUl = apply("list-ul") def locationArrow = apply("location-arrow") def lock = apply("lock") def longArrowDown = apply("long-arrow-down") def longArrowLeft = apply("long-arrow-left") def longArrowRight = apply("long-arrow-right") def longArrowUp = apply("long-arrow-up") def magic = apply("magic") def magnet = apply("magnet") def mailForward = apply("mail-forward") def mailReply = apply("mail-reply") def mailReplyAll = apply("mail-reply-all") def male = apply("male") def mapMarker = apply("map-marker") def mars = apply("mars") def marsDouble = apply("mars-double") def marsStroke = apply("mars-stroke") def marsStrokeH = apply("mars-stroke-h") def marsStrokeV = apply("mars-stroke-v") def maxcdn = apply("maxcdn") def meanpath = apply("meanpath") def medium = apply("medium") def medkit = apply("medkit") def mehO = apply("meh-o") def mercury = apply("mercury") def microphone = apply("microphone") def microphoneSlash = apply("microphone-slash") def minus = apply("minus") def minusCircle = apply("minus-circle") def minusSquare = apply("minus-square") def minusSquareO = apply("minus-square-o") def mobile = apply("mobile") def mobilePhone = apply("mobile-phone") def money = apply("money") def moonO = apply("moon-o") def mortarBoard = apply("mortar-board") def motorcycle = apply("motorcycle") def music = apply("music") def navicon = apply("navicon") def neuter = apply("neuter") def newspaperO = apply("newspaper-o") def openid = apply("openid") def outdent = apply("outdent") def pagelines = apply("pagelines") def paintBrush = apply("paint-brush") def paperPlane = apply("paper-plane") def paperPlaneO = apply("paper-plane-o") def paperclip = apply("paperclip") def paragraph = apply("paragraph") def paste = apply("paste") def pause = apply("pause") def paw = apply("paw") def paypal = apply("paypal") def pencil = apply("pencil") def pencilSquare = apply("pencil-square") def pencilSquareO = apply("pencil-square-o") def phone = apply("phone") def phoneSquare = apply("phone-square") def photo = apply("photo") def pictureO = apply("picture-o") def pieChart = apply("pie-chart") def piedPiper = apply("pied-piper") def piedPiperAlt = apply("pied-piper-alt") def pinterest = apply("pinterest") def pinterestP = apply("pinterest-p") def pinterestSquare = apply("pinterest-square") def plane = apply("plane") def play = apply("play") def playCircle = apply("play-circle") def playCircleO = apply("play-circle-o") def plug = apply("plug") def plus = apply("plus") def plusCircle = apply("plus-circle") def plusSquare = apply("plus-square") def plusSquareO = apply("plus-square-o") def powerOff = apply("power-off") def print = apply("print") def puzzlePiece = apply("puzzle-piece") def qq = apply("qq") def qrcode = apply("qrcode") def question = apply("question") def questionCircle = apply("question-circle") def quoteLeft = apply("quote-left") def quoteRight = apply("quote-right") def ra = apply("ra") def random = apply("random") def rebel = apply("rebel") def recycle = apply("recycle") def reddit = apply("reddit") def redditSquare = apply("reddit-square") def refresh = apply("refresh") def remove = apply("remove") def renren = 
apply("renren") def reorder = apply("reorder") def repeat = apply("repeat") def reply = apply("reply") def replyAll = apply("reply-all") def retweet = apply("retweet") def rmb = apply("rmb") def road = apply("road") def rocket = apply("rocket") def rotateLeft = apply("rotate-left") def rotateRight = apply("rotate-right") def rouble = apply("rouble") def rss = apply("rss") def rssSquare = apply("rss-square") def rub = apply("rub") def ruble = apply("ruble") def rupee = apply("rupee") def save = apply("save") def scissors = apply("scissors") def search = apply("search") def searchMinus = apply("search-minus") def searchPlus = apply("search-plus") def sellsy = apply("sellsy") def send = apply("send") def sendO = apply("send-o") def server = apply("server") def share = apply("share") def shareAlt = apply("share-alt") def shareAltSquare = apply("share-alt-square") def shareSquare = apply("share-square") def shareSquareO = apply("share-square-o") def shekel = apply("shekel") def sheqel = apply("sheqel") def shield = apply("shield") def ship = apply("ship") def shirtsinbulk = apply("shirtsinbulk") def shoppingCart = apply("shopping-cart") def signIn = apply("sign-in") def signOut = apply("sign-out") def signal = apply("signal") def simplybuilt = apply("simplybuilt") def sitemap = apply("sitemap") def skyatlas = apply("skyatlas") def skype = apply("skype") def slack = apply("slack") def sliders = apply("sliders") def slideshare = apply("slideshare") def smileO = apply("smile-o") def soccerBallO = apply("soccer-ball-o") def sort = apply("sort") def sortAlphaAsc = apply("sort-alpha-asc") def sortAlphaDesc = apply("sort-alpha-desc") def sortAmountAsc = apply("sort-amount-asc") def sortAmountDesc = apply("sort-amount-desc") def sortAsc = apply("sort-asc") def sortDesc = apply("sort-desc") def sortDown = apply("sort-down") def sortNumericAsc = apply("sort-numeric-asc") def sortNumericDesc = apply("sort-numeric-desc") def sortUp = apply("sort-up") def soundcloud = apply("soundcloud") def spaceShuttle = apply("space-shuttle") def spinner = apply("spinner") def spoon = apply("spoon") def spotify = apply("spotify") def square = apply("square") def squareO = apply("square-o") def stackExchange = apply("stack-exchange") def stackOverflow = apply("stack-overflow") def star = apply("star") def starHalf = apply("star-half") def starHalfEmpty = apply("star-half-empty") def starHalfFull = apply("star-half-full") def starHalfO = apply("star-half-o") def starO = apply("star-o") def steam = apply("steam") def steamSquare = apply("steam-square") def stepBackward = apply("step-backward") def stepForward = apply("step-forward") def stethoscope = apply("stethoscope") def stop = apply("stop") def streetView = apply("street-view") def strikethrough = apply("strikethrough") def stumbleupon = apply("stumbleupon") def stumbleuponCircle = apply("stumbleupon-circle") def subscript = apply("subscript") def subway = apply("subway") def suitcase = apply("suitcase") def sunO = apply("sun-o") def superscript = apply("superscript") def support = apply("support") def table = apply("table") def tablet = apply("tablet") def tachometer = apply("tachometer") def tag = apply("tag") def tags = apply("tags") def tasks = apply("tasks") def taxi = apply("taxi") def tencentWeibo = apply("tencent-weibo") def terminal = apply("terminal") def textHeight = apply("text-height") def textWidth = apply("text-width") def th = apply("th") def thLarge = apply("th-large") def thList = apply("th-list") def thumbTack = apply("thumb-tack") def thumbsDown = 
apply("thumbs-down") def thumbsODown = apply("thumbs-o-down") def thumbsOUp = apply("thumbs-o-up") def thumbsUp = apply("thumbs-up") def ticket = apply("ticket") def times = apply("times") def timesCircle = apply("times-circle") def timesCircleO = apply("times-circle-o") def tint = apply("tint") def toggleDown = apply("toggle-down") def toggleLeft = apply("toggle-left") def toggleOff = apply("toggle-off") def toggleOn = apply("toggle-on") def toggleRight = apply("toggle-right") def toggleUp = apply("toggle-up") def train = apply("train") def transgender = apply("transgender") def transgenderAlt = apply("transgender-alt") def trash = apply("trash") def trashO = apply("trash-o") def tree = apply("tree") def trello = apply("trello") def trophy = apply("trophy") def truck = apply("truck") def `try` = apply("try") def tty = apply("tty") def tumblr = apply("tumblr") def tumblrSquare = apply("tumblr-square") def turkishLira = apply("turkish-lira") def twitch = apply("twitch") def twitter = apply("twitter") def twitterSquare = apply("twitter-square") def umbrella = apply("umbrella") def underline = apply("underline") def undo = apply("undo") def university = apply("university") def unlink = apply("unlink") def unlock = apply("unlock") def unlockAlt = apply("unlock-alt") def unsorted = apply("unsorted") def upload = apply("upload") def usd = apply("usd") def user = apply("user") def userMd = apply("user-md") def userPlus = apply("user-plus") def userSecret = apply("user-secret") def userTimes = apply("user-times") def users = apply("users") def venus = apply("venus") def venusDouble = apply("venus-double") def venusMars = apply("venus-mars") def viacoin = apply("viacoin") def videoCamera = apply("video-camera") def vimeoSquare = apply("vimeo-square") def vine = apply("vine") def vk = apply("vk") def volumeDown = apply("volume-down") def volumeOff = apply("volume-off") def volumeUp = apply("volume-up") def warning = apply("warning") def wechat = apply("wechat") def weibo = apply("weibo") def weixin = apply("weixin") def whatsapp = apply("whatsapp") def wheelchair = apply("wheelchair") def wifi = apply("wifi") def windows = apply("windows") def won = apply("won") def wordpress = apply("wordpress") def wrench = apply("wrench") def xing = apply("xing") def xingSquare = apply("xing-square") def yahoo = apply("yahoo") def yelp = apply("yelp") def yen = apply("yen") def youtube = apply("youtube") def youtubePlay = apply("youtube-play") def youtubeSquare = apply("youtube-square") }
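// A minimal usage sketch, assuming the same scalajs-react prefix_<^ vdom
// prelude used above; an Icon composes like any other ReactNode child, and
// the tag/attribute choices here are illustrative.
object IconUsageSketch {
  import japgolly.scalajs.react.vdom.prefix_<^._

  val refreshButton = <.button(Icon.refresh, " Reload")
  val warningBadge = <.span(^.className := "badge", Icon.exclamationTriangle)
}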
somanythings/drt-scalajs-spa-exploration
client/src/main/scala/spatutorial/client/components/Icon.scala
Scala
apache-2.0
21,471
object Hello extends App {
  println("hello" + args.toList.toString)
}
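// Illustrative: under this sbt input-task test, `run a b` prints
// "helloList(a, b)", since `args.toList.toString` renders as "List(a, b)".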
som-snytt/xsbt
sbt/src/sbt-test/actions/input-task/Hello.scala
Scala
bsd-3-clause
71
package cz.vse.easyminer.miner

case class MinerTask(
  antecedent: BoolExpression[Attribute],
  interestMeasures: Set[InterestMeasure],
  consequent: BoolExpression[Attribute]
)

trait MinerTaskValidator {
  def validate(mt: MinerTask): Unit
}
KIZI/EasyMiner-Apriori-R
src/main/scala/cz/vse/easyminer/miner/MinerTask.scala
Scala
bsd-3-clause
246
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalactic

import org.scalatest._

class TimesOnIntSpec extends FunSpec with TimesOnInt {

  describe("The TimesOnInt trait") {

    it("should allow people to repeat side effects a specified number of times") {

      // Need to support this one, because someone may invoke times on an integer variable.
      // Probably need to support 0 times as well, but should throw IAE if negative is passed.
      var i = 0
      0 times { i += 1 }
      assert(i === 0)

      1 times { i += 1 }
      assert(i === 1)

      2 times { i += 1 }
      assert(i === 3)

      3 times { i += 1 }
      assert(i === 6)

      4 times { i += 1 }
      assert(i === 10)

      90 times { i += 1 }
      assert(i === 100)
    }

    it("should throw IllegalArgumentException if times is invoked on a negative integer") {
      var i = 0
      intercept[IllegalArgumentException] {
        -1 times { i += 1 }
      }
      assert(i === 0)
    }
  }
}
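// A minimal production-side sketch of the syntax the spec exercises; it
// assumes mixing in TimesOnInt, exactly as the spec above does.
object TimesOnIntSketch extends TimesOnInt {
  // prints "hello" n times; throws IllegalArgumentException if n < 0
  def greet(n: Int): Unit = n times { println("hello") }
}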
SRGOM/scalatest
scalactic-test/src/test/scala/org/scalactic/TimesOnIntSpec.scala
Scala
apache-2.0
1,544
/*
 * Copyright (c) 2013, Hidekatsu Hirose
 * Copyright (c) 2013, Hirose-Zouen
 * This file is subject to the terms and conditions defined in
 * file 'LICENSE.txt', which is part of this source code package.
 */

package org.hirosezouen.hznet

import java.security.MessageDigest

import org.hirosezouen.hzutil._

object HZHTTP_Digest_Authentication {

  case class HashHex(hash: IndexedSeq[Byte]) {
    import HashHex._
    require(hash.size == LENGTH)
    lazy val hexStr = hash.map("%02x".format(_)).mkString
    def getBytes: Array[Byte] = hexStr.getBytes
    override def toString(): String = hexStr
  }
  object HashHex {
    def LENGTH = 16
    def empty(): HashHex = HashHex(new Array[Byte](LENGTH))
  }

  /* calculate H(A1) as per spec */
  def DigestCalcHA1(pszAlg: String,
                    pszUserName: String,
                    pszRealm: String,
                    pszPassword: String,
                    pszNonce: String,
                    pszCNonce: String
                   ): HashHex = {
    val md = MessageDigest.getInstance("MD5")
    md.update(pszUserName.getBytes)
    md.update(":".getBytes)
    md.update(pszRealm.getBytes)
    md.update(":".getBytes)
    md.update(pszPassword.getBytes)
    var ha1 = md.clone().asInstanceOf[MessageDigest].digest
    if(pszAlg.compareToIgnoreCase("md5-sess") == 0) {
      md.update(":".getBytes)
      md.update(pszNonce.getBytes)
      md.update(":".getBytes)
      md.update(pszCNonce.getBytes)
      ha1 = md.digest
    }
    HashHex(ha1)
  }

  /* calculate request-digest/response-digest as per HTTP Digest spec */
  def DigestCalcResponse(HA1: HashHex,          /* H(A1) */
                         pszNonce: String,      /* nonce from server */
                         pszNonceCount: String, /* 8 hex digits */
                         pszCNonce: String,     /* client nonce */
                         pszQop: String,        /* qop-value: "", "auth", "auth-int" */
                         pszMethod: String,     /* method from the request */
                         pszDigestUri: String,  /* requested URL */
                         HEntity: HashHex       /* H(entity body) if qop="auth-int" */
                        ): HashHex = {
    var md = MessageDigest.getInstance("MD5")
    md.update(pszMethod.getBytes)
    md.update(":".getBytes)
    md.update(pszDigestUri.getBytes)
    if(pszQop.compareToIgnoreCase("auth-int") == 0) {
      md.update(":".getBytes)
      md.update(HEntity.getBytes)
    }
    val HA2 = HashHex(md.digest)

    md = MessageDigest.getInstance("MD5")
    md.update(HA1.getBytes)
    md.update(":".getBytes)
    md.update(pszNonce.getBytes)
    md.update(":".getBytes)
    if(pszQop.length != 0) {
      md.update(pszNonceCount.getBytes)
      md.update(":".getBytes)
      md.update(pszCNonce.getBytes)
      md.update(":".getBytes)
      md.update(pszQop.getBytes)
      md.update(":".getBytes)
    }
    md.update(HA2.getBytes)
    HashHex(md.digest)
  }
}
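// A minimal usage sketch following RFC 2617: derive H(A1) first, then the
// request digest. All field values below are illustrative placeholders.
object DigestSketch {
  import HZHTTP_Digest_Authentication._

  val ha1 = DigestCalcHA1("MD5", "alice", "realm@example.com", "secret", "nonce123", "cnonce456")
  val response = DigestCalcResponse(ha1, "nonce123", "00000001", "cnonce456",
                                    "auth", "GET", "/index.html", HashHex.empty())
  // response.hexStr is the value carried in the Authorization header's
  // response field
}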
chokopapashi/HZUtils1.6.x_Scala2.10.5
src/main/scala/org/hirosezouen/hznet/HZHTTTP_Digest_Authentication.scala
Scala
bsd-3-clause
3,397
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ai.h2o.sparkling.benchmarks

import ai.h2o.sparkling.H2OFrame
import org.apache.spark.sql.DataFrame

class DataFrameToH2OFrameConversionBenchmark(context: BenchmarkContext)
  extends BenchmarkBase[DataFrame, H2OFrame](context) {

  override protected def initialize(): DataFrame = loadDataToDataFrame()

  override protected def body(dataFrame: DataFrame): H2OFrame = context.hc.asH2OFrame(dataFrame)

  override protected def cleanUp(dataFrame: DataFrame, h2oFrame: H2OFrame): Unit = {
    removeFromCache(dataFrame)
    h2oFrame.delete()
  }
}
h2oai/sparkling-water
benchmarks/src/main/scala/ai/h2o/sparkling/benchmarks/DataFrameToH2OFrameConversionBenchmark.scala
Scala
apache-2.0
1,356
package org.http4s
package servlet

import java.util.concurrent.ExecutorService

import org.http4s.headers.`Transfer-Encoding`
import server._

import javax.servlet.http.{HttpServletResponse, HttpServletRequest, HttpServlet}
import java.net.{InetSocketAddress, InetAddress}

import scala.collection.JavaConverters._
import javax.servlet._

import scala.concurrent.duration.Duration
import scalaz.concurrent.{Strategy, Task}
import scalaz.{-\/, \/-}

import scala.util.control.NonFatal
import org.log4s.getLogger

class Http4sServlet(service: HttpService,
                    asyncTimeout: Duration = Duration.Inf,
                    threadPool: ExecutorService = Strategy.DefaultExecutorService,
                    private[this] var servletIo: ServletIo = BlockingServletIo(DefaultChunkSize))
  extends HttpServlet {

  private[this] val logger = getLogger

  private val asyncTimeoutMillis = if (asyncTimeout.isFinite()) asyncTimeout.toMillis else -1 // -1 == Inf

  private[this] var serverSoftware: ServerSoftware = _

  // micro-optimization: unwrap the service and call its .run directly
  private[this] val serviceFn = service.run

  override def init(config: ServletConfig): Unit = {
    val servletContext = config.getServletContext
    val servletApiVersion = ServletApiVersion(servletContext)
    logger.info(s"Detected Servlet API version $servletApiVersion")
    verifyServletIo(servletApiVersion)
    logServletIo()
    serverSoftware = ServerSoftware(servletContext.getServerInfo)
  }

  // TODO This is a dodgy check. It will have already triggered class loading of javax.servlet.WriteListener.
  // Remove when we can break binary compatibility.
  private def verifyServletIo(servletApiVersion: ServletApiVersion): Unit = servletIo match {
    case NonBlockingServletIo(chunkSize) if servletApiVersion < ServletApiVersion(3, 1) =>
      logger.warn("Non-blocking servlet I/O requires Servlet API >= 3.1. Falling back to blocking I/O.")
      servletIo = BlockingServletIo(chunkSize)
    case _ => // cool
  }

  private def logServletIo(): Unit =
    logger.info(servletIo match {
      case BlockingServletIo(chunkSize) => s"Using blocking servlet I/O with chunk size ${chunkSize}"
      case NonBlockingServletIo(chunkSize) => s"Using non-blocking servlet I/O with chunk size ${chunkSize}"
    })

  override def service(servletRequest: HttpServletRequest,
                       servletResponse: HttpServletResponse): Unit =
    try {
      val ctx = servletRequest.startAsync()
      ctx.setTimeout(asyncTimeoutMillis)
      // Must be done on the container thread for Tomcat's sake when using async I/O.
      val bodyWriter = servletIo.initWriter(servletResponse)
      toRequest(servletRequest).fold(
        onParseFailure(_, servletResponse, bodyWriter),
        handleRequest(ctx, _, bodyWriter)
      ).runAsync {
        case \/-(()) => ctx.complete()
        case -\/(t) => errorHandler(servletRequest, servletResponse)(t)
      }
    }
    catch errorHandler(servletRequest, servletResponse)

  private def onParseFailure(parseFailure: ParseFailure,
                             servletResponse: HttpServletResponse,
                             bodyWriter: BodyWriter): Task[Unit] = {
    val response = Response(Status.BadRequest).withBody(parseFailure.sanitized)
    renderResponse(response, servletResponse, bodyWriter)
  }

  private def handleRequest(ctx: AsyncContext,
                            request: Request,
                            bodyWriter: BodyWriter): Task[Unit] = {
    ctx.addListener(new AsyncTimeoutHandler(request, bodyWriter))
    val response = Task.fork(serviceFn(request))(threadPool)
    val servletResponse = ctx.getResponse.asInstanceOf[HttpServletResponse]
    renderResponse(response, servletResponse, bodyWriter)
  }

  private class AsyncTimeoutHandler(request: Request, bodyWriter: BodyWriter) extends AbstractAsyncListener {
    override def onTimeout(event: AsyncEvent): Unit = {
      val ctx = event.getAsyncContext
      val servletResponse = ctx.getResponse.asInstanceOf[HttpServletResponse]
      if (!servletResponse.isCommitted) {
        val response = Response(Status.InternalServerError).withBody("Service timed out.")
        renderResponse(response, servletResponse, bodyWriter).run
      }
      else {
        val servletRequest = ctx.getRequest.asInstanceOf[HttpServletRequest]
        logger.warn(s"Async context timed out, but response was already committed: ${request.method} ${request.uri.path}")
      }
      ctx.complete()
    }
  }

  private def renderResponse(response: Task[Response],
                             servletResponse: HttpServletResponse,
                             bodyWriter: BodyWriter): Task[Unit] =
    response.flatMap { r =>
      // Note: the servlet API gives us no undeprecated method to both set
      // a body and a status reason. We sacrifice the status reason.
      servletResponse.setStatus(r.status.code)
      for (header <- r.headers if header.isNot(`Transfer-Encoding`))
        servletResponse.addHeader(header.name.toString, header.value)
      bodyWriter(r)
    }

  private def errorHandler(servletRequest: ServletRequest,
                           servletResponse: HttpServletResponse): PartialFunction[Throwable, Unit] = {
    case t: Throwable if servletResponse.isCommitted =>
      logger.error(t)("Error processing request after response was committed")

    case t: Throwable =>
      logger.error(t)("Error processing request")
      val response = Task.now(Response(Status.InternalServerError))
      // We don't know what I/O mode we're in here, and we're not rendering a body
      // anyway, so we use a NullBodyWriter.
      renderResponse(response, servletResponse, NullBodyWriter).run
      if (servletRequest.isAsyncStarted)
        servletRequest.getAsyncContext.complete()
  }

  private def toRequest(req: HttpServletRequest): ParseResult[Request] =
    for {
      method <- Method.fromString(req.getMethod)
      uri <- Uri.requestTarget(Option(req.getQueryString).map { q => s"${req.getRequestURI}?$q" }.getOrElse(req.getRequestURI))
      version <- HttpVersion.fromString(req.getProtocol)
    } yield Request(
      method = method,
      uri = uri,
      httpVersion = version,
      headers = toHeaders(req),
      body = servletIo.reader(req),
      attributes = AttributeMap(
        Request.Keys.PathInfoCaret(req.getContextPath.length + req.getServletPath.length),
        Request.Keys.ConnectionInfo(Request.Connection(
          InetSocketAddress.createUnresolved(req.getRemoteAddr, req.getRemotePort),
          InetSocketAddress.createUnresolved(req.getLocalAddr, req.getLocalPort),
          req.isSecure
        )),
        Request.Keys.ServerSoftware(serverSoftware)
      )
    )

  private def toHeaders(req: HttpServletRequest): Headers = {
    val headers = for {
      name <- req.getHeaderNames.asScala
      value <- req.getHeaders(name).asScala
    } yield Header(name, value)
    Headers(headers.toSeq : _*)
  }
}
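// A minimal mounting sketch for a Servlet 3.x container; `myService` and the
// registration name are illustrative, not part of this file's API.
object Http4sServletMountSketch {
  def mount(ctx: ServletContext, myService: HttpService): Unit = {
    val reg = ctx.addServlet("http4s", new Http4sServlet(myService))
    reg.addMapping("/*")
    // required for the async I/O path used in service() above
    reg.setAsyncSupported(true)
  }
}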
m4dc4p/http4s
servlet/src/main/scala/org/http4s/servlet/Http4sServlet.scala
Scala
apache-2.0
6,940
package forms

import play.api.data.Form
import play.api.data.Forms._

/**
 * Represents the sign in form and its data.
 */
object SignInForm {

  /**
   * A play framework form.
   */
  val form = Form(
    mapping(
      "email" -> email,
      "password" -> nonEmptyText
    )(SignInData.apply)(SignInData.unapply)
  )

  /**
   * The sign in form data.
   *
   * @param email email of the user
   * @param password password of the user
   */
  case class SignInData(email: String, password: String)
}
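// A minimal controller-side sketch, assuming an implicit Request is in scope
// as in a normal Play action; the response bodies are illustrative.
object SignInFormSketch {
  import play.api.mvc._
  import play.api.mvc.Results._

  def handle(implicit request: Request[_]): Result =
    SignInForm.form.bindFromRequest().fold(
      formWithErrors => BadRequest("invalid sign-in data"),
      signInData => Ok(s"welcome ${signInData.email}")
    )
}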
mb720/cvs
app/forms/SignInForm.scala
Scala
bsd-2-clause
518
package com.giyeok.jparser.study.parsergen

import com.giyeok.jparser.{NGrammar, Symbols}
import com.giyeok.jparser.metalang.MetaGrammar
import com.giyeok.jparser.parsergen.deprecated.{AKernel, GrammarAnalyzer}
import com.giyeok.jparser.utils.TermGrouper
import com.giyeok.jparser.visualize.FigureGenerator.Spacing
import com.giyeok.jparser.visualize.{BasicVisualizeResources, FigureGenerator}
import org.eclipse.draw2d.{Figure, FigureCanvas, LineBorder}
import org.eclipse.swt.SWT
import org.eclipse.swt.layout.FillLayout
import org.eclipse.swt.widgets.{Display, Shell}

object AllPathsPrinter {
  def main(args: Array[String]): Unit = {
    val testGrammarText: String =
      """S = 'a'+
        |""".stripMargin('|')
    val rawGrammar = MetaGrammar.translate("Test Grammar", testGrammarText).left.get
    val grammar: NGrammar = NGrammar.fromGrammar(rawGrammar)
    val analyzer = new GrammarAnalyzer(grammar)

    val display = new Display()
    val shell = new Shell(display)

    val g = FigureGenerator.draw2d.Generator
    val nodeFig = BasicVisualizeResources.nodeFigureGenerators

    val canvas = new FigureCanvas(shell, SWT.NONE)

    val paths = analyzer.zeroReachablePathsToTerminalsFrom(analyzer.startKernel) sortBy { path =>
      path.last.end.symbolId
    }
    val reachableTerms = TermGrouper.termGroupsOf(
      (paths map { path =>
        path.last.end
      } map { reachableTerm =>
        grammar.symbolOf(reachableTerm.symbolId)
      } map { term =>
        term.symbol.asInstanceOf[Symbols.Terminal]
      }).toSet)

    reachableTerms foreach { termGroup =>
      println(termGroup.toShortString)
    }

    val figure = g.verticalFig(Spacing.Big, paths map { path =>
      def genFig(kernel: AKernel): Figure = {
        nodeFig.symbol.symbolPointerFig(grammar, kernel.symbolId, kernel.pointer)
      }
      g.horizontalFig(Spacing.Small, List(genFig(path.head.start)) ++ (path map { edge =>
        val endFig = genFig(edge.end)
        if (grammar.nsequences.contains(edge.end.symbolId)) {
          val border = new LineBorder()
          endFig.setBorder(border)
        }
        g.horizontalFig(Spacing.None, Seq(g.textFig("->", nodeFig.appear.default), endFig))
      }))
    })
    canvas.setContents(figure)

    shell.setLayout(new FillLayout)
    shell.open()
    while (!shell.isDisposed) {
      if (!display.readAndDispatch()) {
        display.sleep()
      }
    }
    display.dispose()
  }
}
Joonsoo/moon-parser
study/src/main/scala/com/giyeok/jparser/study/parsergen/AllPathsPrinter.scala
Scala
mit
2,612
package edu.berkeley.nlp.coref

import scala.collection.mutable.HashMap
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

class OrderedClustering(val clusters: Seq[Seq[Int]]) {
  // Elements must be consecutive integers from 0 up to n
  private val allIndicesSorted = clusters.foldLeft(new ArrayBuffer[Int])(_ ++ _).sorted;
  require(allIndicesSorted.sameElements((0 until allIndicesSorted.size).toSeq), allIndicesSorted);

  private val mentionToClusterMap = new HashMap[Int,Seq[Int]];
  for (cluster <- clusters) {
    for (i <- cluster) {
      mentionToClusterMap.put(i, cluster);
    }
  }

  def getCluster(idx: Int) = mentionToClusterMap(idx);

  def isSingleton(idx: Int) = mentionToClusterMap(idx).size == 1;

  def startsCluster(idx: Int) = mentionToClusterMap(idx)(0) == idx;

  def areInSameCluster(idx1: Int, idx2: Int) = mentionToClusterMap(idx1).contains(idx2);

  def getImmediateAntecedent(idx: Int) = {
    val cluster = mentionToClusterMap(idx);
    val mentIdxInCluster = cluster.indexOf(idx);
    if (mentIdxInCluster == 0) {
      -1
    } else {
      cluster(mentIdxInCluster - 1);
    }
  }

  def getAllAntecedents(idx: Int) = {
    val cluster = mentionToClusterMap(idx);
    cluster.slice(0, cluster.indexOf(idx));
  }

  def getAllConsequents(idx: Int) = {
    val cluster = mentionToClusterMap(idx);
    cluster.slice(cluster.indexOf(idx) + 1, cluster.size);
  }

  // Needed for output printing
  def getClusterIdx(idx: Int) = {
    var clusterIdx = 0;
    for (i <- 0 until clusters.size) {
      if (clusters(i).sameElements(mentionToClusterMap(idx))) {
        clusterIdx = i;
      }
    }
    clusterIdx;
  }

  def getSubclustering(mentIdxsToKeep: Seq[Int]): OrderedClustering = {
    val oldIndicesToNewIndicesMap = new HashMap[Int,Int]();
    (0 until mentIdxsToKeep.size).map(i => oldIndicesToNewIndicesMap.put(mentIdxsToKeep(i), i));
    val filteredConvertedClusters = clusters.map(cluster =>
      cluster.filter(mentIdxsToKeep.contains(_)).map(mentIdx => oldIndicesToNewIndicesMap(mentIdx)));
    val filteredConvertedClustersNoEmpties = filteredConvertedClusters.filter(cluster => !cluster.isEmpty);
    new OrderedClustering(filteredConvertedClustersNoEmpties);
  }
}

object OrderedClustering {

  def createFromClusterIds(clusterIds: Seq[Int]) = {
    val mentIdAndClusterId = (0 until clusterIds.size).map(i => (i, clusterIds(i)));
    val clustersUnsorted = mentIdAndClusterId.groupBy(_._2).values;
    val finalClusters = clustersUnsorted.toSeq.sortBy(_.head).map(clusterWithClusterId => clusterWithClusterId.map(_._1));
    new OrderedClustering(finalClusters.toSeq);
  }

  def createFromBackpointers(backpointers: Seq[Int]) = {
    var nextClusterID = 0;
    val clusters = new ArrayBuffer[ArrayBuffer[Int]]();
    val mentionToCluster = new HashMap[Int,ArrayBuffer[Int]]();
    for (i <- 0 until backpointers.size) {
      if (backpointers(i) == i) {
        val cluster = ArrayBuffer(i);
        clusters += cluster;
        mentionToCluster.put(i, cluster);
      } else {
        val cluster = mentionToCluster(backpointers(i));
        cluster += i;
        mentionToCluster.put(i, cluster);
      }
    }
    new OrderedClustering(clusters);
  }
}
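// A worked sketch of createFromBackpointers: each mention points at its
// antecedent (or at itself when it opens a cluster), so Seq(0, 0, 2, 2)
// produces the clusters {0, 1} and {2, 3}.
object OrderedClusteringSketch {
  val clustering = OrderedClustering.createFromBackpointers(Seq(0, 0, 2, 2));
  val sameCluster = clustering.areInSameCluster(0, 1);   // true
  val antecedent = clustering.getImmediateAntecedent(3); // 2
  val singleton = clustering.isSingleton(0);             // false
}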
nate331/jbt-berkeley-coref-resolution
src/main/java/edu/berkeley/nlp/coref/OrderedClustering.scala
Scala
gpl-3.0
3,263
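Editor's note: a minimal usage sketch (not part of the original file) illustrating the backpointer convention above, where backpointers(i) == i starts a new cluster; class and method names are taken from the record.

import edu.berkeley.nlp.coref.OrderedClustering

object OrderedClusteringExample {
  def main(args: Array[String]): Unit = {
    // Mentions 0..3: mention 2 points back to 0, mention 3 points back to 1,
    // so the clusters are [0, 2] and [1, 3].
    val clustering = OrderedClustering.createFromBackpointers(Seq(0, 1, 0, 1))
    assert(clustering.areInSameCluster(0, 2))
    assert(clustering.getImmediateAntecedent(3) == 1)
    assert(!clustering.isSingleton(0))
  }
}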
package rpgboss.model import java.io._ import au.com.bytecode.opencsv.{ CSVReader, CSVWriter } import org.json4s.native.Serialization import org.json4s.ShortTypeHints import rpgboss.lib._ import rpgboss.model._ import rpgboss.model.event._ import rpgboss.lib.FileHelper._ import rpgboss.model.resource.RpgMap import scala.collection.mutable.ArrayBuffer import rpgboss.model.resource.RpgMapMetadata /* * This class has mutable members. * * See RpgMap object for an explanation of the data format. * * botLayer, midLayer, and topLayer must always be of size at least 1 x 1 */ case class RpgMapData(botLayer: Array[Array[Byte]], midLayer: Array[Array[Byte]], topLayer: Array[Array[Byte]], var events: Map[Int, RpgEvent]) { import RpgMapData._ def drawOrder = List(botLayer, midLayer, topLayer) /** * Removes all the invalid tiles. Allows for an optimized player engine. */ def sanitizeForMetadata(metadata: RpgMapMetadata) = { for (layerAry <- List(botLayer, midLayer, topLayer)) { for (tileY <- 0 until metadata.ySize) { val row = layerAry(tileY) import RpgMap.bytesPerTile for (tileX <- 0 until metadata.xSize) { val idx = tileX * bytesPerTile val byte1 = row(idx) val byte2 = row(idx + 1) val byte3 = row(idx + 2) if (byte1 < 0) { } else { // Regular tile if (byte1 >= metadata.tilesets.length) { row(idx) = RpgMap.emptyTileByte } } } } } } def writeCsv(file: File, data: Array[Array[Byte]]) = { val writer = new CSVWriter(new FileWriter(file), '\t', CSVWriter.NO_QUOTE_CHARACTER) data.foreach(row => writer.writeNext(row.map(b => b.toString).toArray)) writer.close() true } def writeToFile(p: Project, name: String) = { val (mapFile, botFile, midFile, topFile, evtFile) = datafiles(p, name) val mapFileWritten = mapFile.isFile() || mapFile.createNewFile() val layersWritten = writeCsv(botFile, botLayer) && writeCsv(midFile, midLayer) && writeCsv(topFile, topLayer) val eventsWritten = JsonUtils.writeModelToJsonWithFormats( evtFile, RpgMapDataEventsIntermediate(events.values.toArray), RpgMapData.formats) mapFileWritten && layersWritten && eventsWritten } def resized(newXSize: Int, newYSize: Int) = { import RpgMap._ val newLayers = List(botLayer, midLayer, topLayer) map { layerAry => assert(!layerAry.isEmpty) assert(layerAry.head.length % RpgMap.bytesPerTile == 0) val oldXSize = layerAry.head.length / RpgMap.bytesPerTile assert(layerAry.forall(_.length == oldXSize * RpgMap.bytesPerTile)) val oldYSize = layerAry.length // Expand or contract all the existing rows val newRowsSameYDim = layerAry.map { row => val newRow = if (row.size > newXSize * bytesPerTile) row.take(newXSize * bytesPerTile) else row ++ makeRowArray(newXSize - oldXSize, RpgMap.emptyTileSeed) assert(newRow.length == newXSize * bytesPerTile) newRow } // Generate or destroy new rows if (newYSize < layerAry.size) { newRowsSameYDim.take(newYSize) } else { newRowsSameYDim ++ Array.fill(newYSize - oldYSize)({ makeRowArray(newXSize, RpgMap.emptyTileSeed) }) } } copy( botLayer = newLayers(0), midLayer = newLayers(1), topLayer = newLayers(2)) } def deepcopy() = { val eventsCopy = events.map { case (k, v) => k -> Utils.deepCopy(v) } copy(botLayer = botLayer.map(_.clone()), midLayer = midLayer.map(_.clone()), topLayer = topLayer.map(_.clone()), events = eventsCopy) } lazy val distinctChars = { val set = new DistinctCharacterSet for ((_, event) <- events; state <- event.states) { set ++= state.distinctChars } set } } case class RpgMapDataEventsIntermediate(events: Array[RpgEvent]) case object RpgMapData { val formats = Serialization.formats(EventCmd.hints + 
EventParameter.hints) def datafiles(p: Project, name: String) = { val mapFile = new File(RpgMap.rcDir(p), name) val botFile = new File(RpgMap.rcDir(p), name + ".bot.csv") val midFile = new File(RpgMap.rcDir(p), name + ".mid.csv") val topFile = new File(RpgMap.rcDir(p), name + ".top.csv") val evtFile = new File(RpgMap.rcDir(p), name + ".evt.json") (mapFile, botFile, midFile, topFile, evtFile) } def readCsvArray(file: File): Option[Array[Array[Byte]]]= { val reader = new CSVReader(new FileReader(file), '\t') val buffer = new ArrayBuffer[ArrayBuffer[Byte]]() val csvIt = Iterator.continually(reader.readNext()).takeWhile(_ != null) for (row <- csvIt) { buffer.append(ArrayBuffer(row.map(_.toInt.toByte): _*)) } reader.close() Some(buffer.map(_.toArray).toArray) } def readFromDisk(p: Project, name: String): Option[RpgMapData] = { val (_, botFile, midFile, topFile, evtFile) = datafiles(p, name) val botAryOpt = readCsvArray(botFile) val midAryOpt = readCsvArray(midFile) val topAryOpt = readCsvArray(topFile) val eventsIntermediateOpt = JsonUtils.readModelFromJsonWithFormats[RpgMapDataEventsIntermediate]( evtFile, RpgMapData.formats) for (botAry <- botAryOpt; midAry <- midAryOpt; topAry <- topAryOpt; eventsIntermediate <- eventsIntermediateOpt) yield { val events = eventsIntermediate.events.map(e => e.id->e).toMap RpgMapData(botAry, midAry, topAry, events) } } }
DrDub/rpgboss
core/src/main/scala/rpgboss/model/RpgMapData.scala
Scala
agpl-3.0
5,728
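Editor's note: a hypothetical round-trip sketch for the tab-separated layer format used by writeCsv/readCsvArray above; it assumes the record's classes and opencsv are on the classpath.

import java.io.{File, PrintWriter}
import rpgboss.model.RpgMapData

object LayerCsvSketch {
  def main(args: Array[String]): Unit = {
    // Write two rows in the same shape writeCsv produces: tab-separated byte values.
    val f = File.createTempFile("layer", ".csv")
    val pw = new PrintWriter(f)
    pw.println("1\t2\t3")
    pw.println("4\t5\t6")
    pw.close()

    val layer = RpgMapData.readCsvArray(f).get
    assert(layer.map(_.toSeq).toSeq == Seq(Seq[Byte](1, 2, 3), Seq[Byte](4, 5, 6)))
  }
}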
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import scala.collection.immutable.TreeSet import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.TypeCheckResult import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode, GenerateSafeProjection, GenerateUnsafeProjection, Predicate => BasePredicate} import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.util.TypeUtils import org.apache.spark.sql.types._ object InterpretedPredicate { def create(expression: Expression, inputSchema: Seq[Attribute]): InterpretedPredicate = create(BindReferences.bindReference(expression, inputSchema)) def create(expression: Expression): InterpretedPredicate = new InterpretedPredicate(expression) } case class InterpretedPredicate(expression: Expression) extends BasePredicate { override def eval(r: InternalRow): Boolean = expression.eval(r).asInstanceOf[Boolean] } /** * An [[Expression]] that returns a boolean value. */ trait Predicate extends Expression { override def dataType: DataType = BooleanType } trait PredicateHelper { protected def splitConjunctivePredicates(condition: Expression): Seq[Expression] = { condition match { case And(cond1, cond2) => splitConjunctivePredicates(cond1) ++ splitConjunctivePredicates(cond2) case other => other :: Nil } } protected def splitDisjunctivePredicates(condition: Expression): Seq[Expression] = { condition match { case Or(cond1, cond2) => splitDisjunctivePredicates(cond1) ++ splitDisjunctivePredicates(cond2) case other => other :: Nil } } // Substitute any known alias from a map. protected def replaceAlias( condition: Expression, aliases: AttributeMap[Expression]): Expression = { // Use transformUp to prevent infinite recursion when the replacement expression // redefines the same ExprId, condition.transformUp { case a: Attribute => aliases.getOrElse(a, a) } } /** * Returns true if `expr` can be evaluated using only the output of `plan`. This method * can be used to determine when it is acceptable to move expression evaluation within a query * plan. * * For example consider a join between two relations R(a, b) and S(c, d). * * - `canEvaluate(EqualTo(a,b), R)` returns `true` * - `canEvaluate(EqualTo(a,c), R)` returns `false` * - `canEvaluate(Literal(1), R)` returns `true` as literals CAN be evaluated on any plan */ protected def canEvaluate(expr: Expression, plan: LogicalPlan): Boolean = expr.references.subsetOf(plan.outputSet) /** * Returns true iff `expr` could be evaluated as a condition within join. */ protected def canEvaluateWithinJoin(expr: Expression): Boolean = expr match { // Non-deterministic expressions are not allowed as join conditions. 
    case e if !e.deterministic => false
    case _: ListQuery | _: Exists =>
      // A ListQuery defines the query which we want to search in an IN subquery expression.
      // Currently the only way to evaluate an IN subquery is to convert it to a
      // LeftSemi/LeftAnti/ExistenceJoin by `RewritePredicateSubquery` rule.
      // It cannot be evaluated as part of a Join operator.
      // An Exists shouldn't be pushed into a Join operator either.
      false
    case e: SubqueryExpression =>
      // non-correlated subquery will be replaced as literal
      e.children.isEmpty
    case a: AttributeReference => true
    case e: Unevaluable => false
    case e => e.children.forall(canEvaluateWithinJoin)
  }
}

@ExpressionDescription(
  usage = "_FUNC_ expr - Logical not.")
case class Not(child: Expression)
  extends UnaryExpression with Predicate with ImplicitCastInputTypes with NullIntolerant {

  override def toString: String = s"NOT $child"

  override def inputTypes: Seq[DataType] = Seq(BooleanType)

  protected override def nullSafeEval(input: Any): Any = !input.asInstanceOf[Boolean]

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c => s"!($c)")
  }

  override def sql: String = s"(NOT ${child.sql})"
}

/**
 * Evaluates to `true` if `list` contains `value`.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "expr1 _FUNC_(expr2, expr3, ...) - Returns true if `expr` equals to any valN.",
  arguments = """
    Arguments:
      * expr1, expr2, expr3, ... - the arguments must be same type.
  """,
  examples = """
    Examples:
      > SELECT 1 _FUNC_(1, 2, 3);
       true
      > SELECT 1 _FUNC_(2, 3, 4);
       false
      > SELECT named_struct('a', 1, 'b', 2) _FUNC_(named_struct('a', 1, 'b', 1), named_struct('a', 1, 'b', 3));
       false
      > SELECT named_struct('a', 1, 'b', 2) _FUNC_(named_struct('a', 1, 'b', 2), named_struct('a', 1, 'b', 3));
       true
  """)
// scalastyle:on line.size.limit
case class In(value: Expression, list: Seq[Expression]) extends Predicate {

  require(list != null, "list should not be null")

  override def checkInputDataTypes(): TypeCheckResult = {
    val mismatchOpt = list.find(l => !DataType.equalsStructurally(l.dataType, value.dataType,
      ignoreNullability = true))
    if (mismatchOpt.isDefined) {
      list match {
        case ListQuery(_, _, _, childOutputs) :: Nil =>
          val valExprs = value match {
            case cns: CreateNamedStruct => cns.valExprs
            case expr => Seq(expr)
          }
          if (valExprs.length != childOutputs.length) {
            TypeCheckResult.TypeCheckFailure(
              s"""
                 |The number of columns in the left hand side of an IN subquery does not match the
                 |number of columns in the output of subquery.
                 |#columns in left hand side: ${valExprs.length}.
                 |#columns in right hand side: ${childOutputs.length}.
                 |Left side columns:
                 |[${valExprs.map(_.sql).mkString(", ")}].
                 |Right side columns:
                 |[${childOutputs.map(_.sql).mkString(", ")}].""".stripMargin)
          } else {
            val mismatchedColumns = valExprs.zip(childOutputs).flatMap {
              case (l, r) if l.dataType != r.dataType =>
                Seq(s"(${l.sql}:${l.dataType.catalogString}, ${r.sql}:${r.dataType.catalogString})")
              case _ => None
            }
            TypeCheckResult.TypeCheckFailure(
              s"""
                 |The data type of one or more elements in the left hand side of an IN subquery
                 |is not compatible with the data type of the output of the subquery
                 |Mismatched columns:
                 |[${mismatchedColumns.mkString(", ")}]
                 |Left side:
                 |[${valExprs.map(_.dataType.catalogString).mkString(", ")}].
                 |Right side:
                 |[${childOutputs.map(_.dataType.catalogString).mkString(", ")}].""".stripMargin)
          }
        case _ =>
          TypeCheckResult.TypeCheckFailure(s"Arguments must be same type but were: " +
            s"${value.dataType.simpleString} != ${mismatchOpt.get.dataType.simpleString}")
      }
    } else {
      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
    }
  }

  override def children: Seq[Expression] = value +: list
  lazy val inSetConvertible = list.forall(_.isInstanceOf[Literal])
  private lazy val ordering = TypeUtils.getInterpretedOrdering(value.dataType)

  override def nullable: Boolean = children.exists(_.nullable)
  override def foldable: Boolean = children.forall(_.foldable)

  override def toString: String = s"$value IN ${list.mkString("(", ",", ")")}"

  override def eval(input: InternalRow): Any = {
    val evaluatedValue = value.eval(input)
    if (evaluatedValue == null) {
      null
    } else {
      var hasNull = false
      list.foreach { e =>
        val v = e.eval(input)
        if (v == null) {
          hasNull = true
        } else if (ordering.equiv(v, evaluatedValue)) {
          return true
        }
      }
      if (hasNull) {
        null
      } else {
        false
      }
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val javaDataType = ctx.javaType(value.dataType)
    val valueGen = value.genCode(ctx)
    val listGen = list.map(_.genCode(ctx))
    // inTmpResult has 3 possible values:
    // -1 means no matches found and there is at least one value in the list evaluated to null
    val HAS_NULL = -1
    // 0 means no matches found and all values in the list are not null
    val NOT_MATCHED = 0
    // 1 means one value in the list is matched
    val MATCHED = 1
    val tmpResult = ctx.freshName("inTmpResult")
    val valueArg = ctx.freshName("valueArg")
    // All the blocks are meant to be inside a do { ... } while (false); loop.
    // The evaluation of variables can be stopped when we find a matching value.
    val listCode = listGen.map(x =>
      s"""
         |${x.code}
         |if (${x.isNull}) {
         |  $tmpResult = $HAS_NULL; // ${ev.isNull} = true;
         |} else if (${ctx.genEqual(value.dataType, valueArg, x.value)}) {
         |  $tmpResult = $MATCHED; // ${ev.isNull} = false; ${ev.value} = true;
         |  continue;
         |}
       """.stripMargin)

    val codes = ctx.splitExpressionsWithCurrentInputs(
      expressions = listCode,
      funcName = "valueIn",
      extraArguments = (javaDataType, valueArg) :: (ctx.JAVA_BYTE, tmpResult) :: Nil,
      returnType = ctx.JAVA_BYTE,
      makeSplitFunction = body =>
        s"""
           |do {
           |  $body
           |} while (false);
           |return $tmpResult;
         """.stripMargin,
      foldFunctions = _.map { funcCall =>
        s"""
           |$tmpResult = $funcCall;
           |if ($tmpResult == $MATCHED) {
           |  continue;
           |}
         """.stripMargin
      }.mkString("\n"))

    ev.copy(code =
      s"""
         |${valueGen.code}
         |byte $tmpResult = $HAS_NULL;
         |if (!${valueGen.isNull}) {
         |  $tmpResult = $NOT_MATCHED;
         |  $javaDataType $valueArg = ${valueGen.value};
         |  do {
         |    $codes
         |  } while (false);
         |}
         |final boolean ${ev.isNull} = ($tmpResult == $HAS_NULL);
         |final boolean ${ev.value} = ($tmpResult == $MATCHED);
       """.stripMargin)
  }

  override def sql: String = {
    val childrenSQL = children.map(_.sql)
    val valueSQL = childrenSQL.head
    val listSQL = childrenSQL.tail.mkString(", ")
    s"($valueSQL IN ($listSQL))"
  }
}

/**
 * Optimized version of In clause, when all filter values of In clause are
 * static.
*/ case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with Predicate { require(hset != null, "hset could not be null") override def toString: String = s"$child INSET ${hset.mkString("(", ",", ")")}" @transient private[this] lazy val hasNull: Boolean = hset.contains(null) override def nullable: Boolean = child.nullable || hasNull protected override def nullSafeEval(value: Any): Any = { if (set.contains(value)) { true } else if (hasNull) { null } else { false } } @transient lazy val set: Set[Any] = child.dataType match { case _: AtomicType => hset case _: NullType => hset case _ => // for structs use interpreted ordering to be able to compare UnsafeRows with non-UnsafeRows TreeSet.empty(TypeUtils.getInterpretedOrdering(child.dataType)) ++ hset } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val setTerm = ctx.addReferenceObj("set", set) val childGen = child.genCode(ctx) val setIsNull = if (hasNull) { s"${ev.isNull} = !${ev.value};" } else { "" } ev.copy(code = s""" |${childGen.code} |${ctx.JAVA_BOOLEAN} ${ev.isNull} = ${childGen.isNull}; |${ctx.JAVA_BOOLEAN} ${ev.value} = false; |if (!${ev.isNull}) { | ${ev.value} = $setTerm.contains(${childGen.value}); | $setIsNull |} """.stripMargin) } override def sql: String = { val valueSQL = child.sql val listSQL = hset.toSeq.map(Literal(_).sql).mkString(", ") s"($valueSQL IN ($listSQL))" } } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Logical AND.") case class And(left: Expression, right: Expression) extends BinaryOperator with Predicate { override def inputType: AbstractDataType = BooleanType override def symbol: String = "&&" override def sqlOperator: String = "AND" override def eval(input: InternalRow): Any = { val input1 = left.eval(input) if (input1 == false) { false } else { val input2 = right.eval(input) if (input2 == false) { false } else { if (input1 != null && input2 != null) { true } else { null } } } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val eval1 = left.genCode(ctx) val eval2 = right.genCode(ctx) // The result should be `false`, if any of them is `false` whenever the other is null or not. if (!left.nullable && !right.nullable) { ev.copy(code = s""" ${eval1.code} boolean ${ev.value} = false; if (${eval1.value}) { ${eval2.code} ${ev.value} = ${eval2.value}; }""", isNull = "false") } else { ev.copy(code = s""" ${eval1.code} boolean ${ev.isNull} = false; boolean ${ev.value} = false; if (!${eval1.isNull} && !${eval1.value}) { } else { ${eval2.code} if (!${eval2.isNull} && !${eval2.value}) { } else if (!${eval1.isNull} && !${eval2.isNull}) { ${ev.value} = true; } else { ${ev.isNull} = true; } } """) } } } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Logical OR.") case class Or(left: Expression, right: Expression) extends BinaryOperator with Predicate { override def inputType: AbstractDataType = BooleanType override def symbol: String = "||" override def sqlOperator: String = "OR" override def eval(input: InternalRow): Any = { val input1 = left.eval(input) if (input1 == true) { true } else { val input2 = right.eval(input) if (input2 == true) { true } else { if (input1 != null && input2 != null) { false } else { null } } } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val eval1 = left.genCode(ctx) val eval2 = right.genCode(ctx) // The result should be `true`, if any of them is `true` whenever the other is null or not. 
if (!left.nullable && !right.nullable) { ev.isNull = "false" ev.copy(code = s""" ${eval1.code} boolean ${ev.value} = true; if (!${eval1.value}) { ${eval2.code} ${ev.value} = ${eval2.value}; }""", isNull = "false") } else { ev.copy(code = s""" ${eval1.code} boolean ${ev.isNull} = false; boolean ${ev.value} = true; if (!${eval1.isNull} && ${eval1.value}) { } else { ${eval2.code} if (!${eval2.isNull} && ${eval2.value}) { } else if (!${eval1.isNull} && !${eval2.isNull}) { ${ev.value} = false; } else { ${ev.isNull} = true; } } """) } } } abstract class BinaryComparison extends BinaryOperator with Predicate { // Note that we need to give a superset of allowable input types since orderable types are not // finitely enumerable. The allowable types are checked below by checkInputDataTypes. override def inputType: AbstractDataType = AnyDataType override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match { case TypeCheckResult.TypeCheckSuccess => TypeUtils.checkForOrderingExpr(left.dataType, this.getClass.getSimpleName) case failure => failure } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { if (ctx.isPrimitiveType(left.dataType) && left.dataType != BooleanType // java boolean doesn't support > or < operator && left.dataType != FloatType && left.dataType != DoubleType) { // faster version defineCodeGen(ctx, ev, (c1, c2) => s"$c1 $symbol $c2") } else { defineCodeGen(ctx, ev, (c1, c2) => s"${ctx.genComp(left.dataType, c1, c2)} $symbol 0") } } protected lazy val ordering: Ordering[Any] = TypeUtils.getInterpretedOrdering(left.dataType) } object BinaryComparison { def unapply(e: BinaryComparison): Option[(Expression, Expression)] = Some((e.left, e.right)) } /** An extractor that matches both standard 3VL equality and null-safe equality. */ object Equality { def unapply(e: BinaryComparison): Option[(Expression, Expression)] = e match { case EqualTo(l, r) => Some((l, r)) case EqualNullSafe(l, r) => Some((l, r)) case _ => None } } // TODO: although map type is not orderable, technically map type should be able to be used // in equality comparison @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` equals `expr2`, or false otherwise.", arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be used in equality comparison. Map type is not supported. For complex types such array/struct, the data types of fields must be orderable. """, examples = """ Examples: > SELECT 2 _FUNC_ 2; true > SELECT 1 _FUNC_ '1'; true > SELECT true _FUNC_ NULL; NULL > SELECT NULL _FUNC_ NULL; NULL """) case class EqualTo(left: Expression, right: Expression) extends BinaryComparison with NullIntolerant { override def symbol: String = "=" protected override def nullSafeEval(left: Any, right: Any): Any = ordering.equiv(left, right) override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { defineCodeGen(ctx, ev, (c1, c2) => ctx.genEqual(left.dataType, c1, c2)) } } // TODO: although map type is not orderable, technically map type should be able to be used // in equality comparison @ExpressionDescription( usage = """ expr1 _FUNC_ expr2 - Returns same result as the EQUAL(=) operator for non-null operands, but returns true if both are null, false if one of the them is null. """, arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be used in equality comparison. 
Map type is not supported. For complex types such array/struct, the data types of fields must be orderable. """, examples = """ Examples: > SELECT 2 _FUNC_ 2; true > SELECT 1 _FUNC_ '1'; true > SELECT true _FUNC_ NULL; false > SELECT NULL _FUNC_ NULL; true """) case class EqualNullSafe(left: Expression, right: Expression) extends BinaryComparison { override def symbol: String = "<=>" override def nullable: Boolean = false override def eval(input: InternalRow): Any = { val input1 = left.eval(input) val input2 = right.eval(input) if (input1 == null && input2 == null) { true } else if (input1 == null || input2 == null) { false } else { ordering.equiv(input1, input2) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val eval1 = left.genCode(ctx) val eval2 = right.genCode(ctx) val equalCode = ctx.genEqual(left.dataType, eval1.value, eval2.value) ev.copy(code = eval1.code + eval2.code + s""" boolean ${ev.value} = (${eval1.isNull} && ${eval2.isNull}) || (!${eval1.isNull} && !${eval2.isNull} && $equalCode);""", isNull = "false") } } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is less than `expr2`.", arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be ordered. For example, map type is not orderable, so it is not supported. For complex types such array/struct, the data types of fields must be orderable. """, examples = """ Examples: > SELECT 1 _FUNC_ 2; true > SELECT 1.1 _FUNC_ '1'; false > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52'); false > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52'); true > SELECT 1 _FUNC_ NULL; NULL """) case class LessThan(left: Expression, right: Expression) extends BinaryComparison with NullIntolerant { override def symbol: String = "<" protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lt(input1, input2) } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is less than or equal to `expr2`.", arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be ordered. For example, map type is not orderable, so it is not supported. For complex types such array/struct, the data types of fields must be orderable. """, examples = """ Examples: > SELECT 2 _FUNC_ 2; true > SELECT 1.0 _FUNC_ '1'; true > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52'); true > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52'); true > SELECT 1 _FUNC_ NULL; NULL """) case class LessThanOrEqual(left: Expression, right: Expression) extends BinaryComparison with NullIntolerant { override def symbol: String = "<=" protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lteq(input1, input2) } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is greater than `expr2`.", arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be ordered. For example, map type is not orderable, so it is not supported. For complex types such array/struct, the data types of fields must be orderable. 
""", examples = """ Examples: > SELECT 2 _FUNC_ 1; true > SELECT 2 _FUNC_ '1.1'; true > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52'); false > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52'); false > SELECT 1 _FUNC_ NULL; NULL """) case class GreaterThan(left: Expression, right: Expression) extends BinaryComparison with NullIntolerant { override def symbol: String = ">" protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gt(input1, input2) } @ExpressionDescription( usage = "expr1 _FUNC_ expr2 - Returns true if `expr1` is greater than or equal to `expr2`.", arguments = """ Arguments: * expr1, expr2 - the two expressions must be same type or can be casted to a common type, and must be a type that can be ordered. For example, map type is not orderable, so it is not supported. For complex types such array/struct, the data types of fields must be orderable. """, examples = """ Examples: > SELECT 2 _FUNC_ 1; true > SELECT 2.0 _FUNC_ '2.1'; false > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-07-30 04:17:52'); true > SELECT to_date('2009-07-30 04:17:52') _FUNC_ to_date('2009-08-01 04:17:52'); false > SELECT 1 _FUNC_ NULL; NULL """) case class GreaterThanOrEqual(left: Expression, right: Expression) extends BinaryComparison with NullIntolerant { override def symbol: String = ">=" protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gteq(input1, input2) }
esi-mineset/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
Scala
apache-2.0
25,205
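Editor's note: a standalone sketch (no Spark dependency) of the three-valued IN semantics that In.eval implements above: null on the left yields null, a non-null match beats everything, and an unmatched null element makes the result null rather than false.

object ThreeValuedIn {
  def in(value: Option[Int], list: Seq[Option[Int]]): Option[Boolean] =
    value match {
      case None => None // null IN (...) is null
      case Some(v) =>
        if (list.contains(Some(v))) Some(true)   // a non-null match wins
        else if (list.contains(None)) None       // no match, but a null element
        else Some(false)                         // no match, no nulls
    }

  def main(args: Array[String]): Unit = {
    assert(in(Some(1), Seq(Some(1), None)) == Some(true))
    assert(in(Some(2), Seq(Some(1), None)) == None)
    assert(in(Some(2), Seq(Some(1), Some(3))) == Some(false))
    assert(in(None, Seq(Some(1))) == None)
  }
}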
package com.wix.fax.interfax.sl import com.google.api.client.http.HttpRequestFactory import com.wix.fax.FaxErrorException import com.wix.fax.interfax.sl.model.StatusCode import com.wix.fax.model.{Fax, Status} import scala.concurrent.duration.Duration import scala.util.{Failure, Success, Try} object Endpoints { /** * Secure Lounge endpoint for outbound PCI. * @see <a href="https://www.interfax.net/en/dev/secure_lounge/reference/soap/endpoint">Service Endpoint</a> */ val production = "https://ws-sl.fax.tc/Outbound.asmx/" } object InterfaxslFax { val ID = "net.interfax.sl" } /** * InterFax SecureLounge client. * * HTML documents with Unicode characters must include a * <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> * in their head section (InterFax ignores <meta charset="utf-8">). */ class InterfaxslFax(requestFactory: HttpRequestFactory, endpoint: String = Endpoints.production, connectTimeout: Option[Duration] = None, readTimeout: Option[Duration] = None, numberOfRetries: Int = 0, credentials: Credentials) extends Fax { private val interfaxsl = new InterfaxslClient( requestFactory = requestFactory, endpoint = endpoint, connectTimeout = connectTimeout, readTimeout = readTimeout, numberOfRetries = numberOfRetries, credentials = credentials ) override def getId: String = InterfaxslFax.ID override def send(to: String, html: String): Try[String] = { interfaxsl.sendCharFax(to, html) match { case Success(transactionId) => Success(transactionId.toString) case Failure(e) => Failure(new FaxErrorException(e.getMessage, e)) } } override def retrieveStatus(documentId: String): Try[String] = { interfaxsl.queryList(List(documentId.toLong)) match { case Success(faxItems) => Success(translateInterfaxStatusCode(faxItems.head.Status)) case Failure(e) => Failure(new FaxErrorException(e.getMessage, e)) } } override def retrieveStatuses(documentIds: Iterable[String]): Try[Map[String, String]] = { val transactionIds = documentIds.map { _.toLong }.toList interfaxsl.queryList(transactionIds) match { case Success(faxItems) => Success(faxItems.map { faxItem => faxItem.TransactionID.toString -> translateInterfaxStatusCode(faxItem.Status) }.toMap) case Failure(e) => Failure(new FaxErrorException(e.getMessage, e)) } } private def translateInterfaxStatusCode(statusCode: Int): String = { // @see http://www.interfax.net/en/dev/secure_lounge/reference/soap/statuscodes statusCode match { case StatusCode.ok => Status.sent case x if x < 0 => Status.pending case x if x > 0 => Status.failed } } }
wix/libfax
libfax-interfaxsl/src/main/scala/com/wix/fax/interfax/sl/InterfaxslFax.scala
Scala
apache-2.0
2,903
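Editor's note: a standalone restatement of translateInterfaxStatusCode above; the literal strings stand in for the Status constants, whose exact values are an assumption here.

object StatusTranslationSketch {
  // Mirrors the record's mapping: 0 (StatusCode.ok) means delivered,
  // negative codes mean still in transit, positive codes mean failure.
  def translate(statusCode: Int): String = statusCode match {
    case 0          => "sent"
    case x if x < 0 => "pending"
    case _          => "failed"
  }

  def main(args: Array[String]): Unit = {
    assert(translate(0) == "sent")
    assert(translate(-1) == "pending")
    assert(translate(3) == "failed")
  }
}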
package org.helianto.ingress.controller import org.springframework.web.bind.annotation.{RequestMapping, RestController} /** * Lead controller. */ @RestController @RequestMapping(Array("/lead")) class LeadController { }
iservport/helianto-spring
src/main/scala/org/helianto/ingress/controller/LeadController.scala
Scala
apache-2.0
226
package sai.bytecode import bytecode._ import ea._ import org.apache.bcel.generic.{ConstantPoolGen, InstructionHandle, InstructionList, Type} import sai.bytecode.instruction.{EntryPoint, ExitPoint, Instruction} import sai.vm.Reference import vm.Frame class Method(bcelMethod: org.apache.bcel.classfile.Method, val cpg: ConstantPoolGen, val clazz: Clazz) { val isAbstract = bcelMethod.isAbstract val isNative = bcelMethod.isNative val isDefined = !isAbstract && !isNative && bcelMethod.getCode != null val isPublic = bcelMethod.isPublic val isInstanceMethod = !bcelMethod.isStatic def id = s"${clazz.name}:$name" private def body(bcelInstructions: List[InstructionHandle]) = for (bcelInstruction <- bcelInstructions) yield Instruction(bcelInstruction, cpg, this) private def decorate(body: List[Instruction]) = new EntryPoint(this) :: body ::: List(new ExitPoint(this)) val instructions: List[Instruction] = if (isDefined) decorate(body(new InstructionList(bcelMethod.getCode.getCode).getInstructionHandles.toList)) else Nil def exitPoint = instructions.last def entryPoint = instructions.head def firstInstruction = instructions(1) def lastInstruction = instructions(instructions.length - 2) lazy val controlFlowGraph: List[BasicBlock] = BasicBlocks(this) lazy val exceptionInfo = ExceptionInfo(this, bcelMethod.getCode.getExceptionTable.toList) def lookup(bcelInstruction: org.apache.bcel.generic.InstructionHandle): Instruction = lookup(_ encapsulates bcelInstruction) def lookup(bcelInstruction: org.apache.bcel.generic.Instruction): Instruction = lookup(_ encapsulates bcelInstruction) def lookup(pc: Int): Instruction = lookup(_.pc contains pc) def lookup(predicate: Instruction => Boolean): Instruction = instructions .find(predicate) .getOrElse(throw new RuntimeException("instruction not found")) def lineNumber(bcelInstruction: org.apache.bcel.generic.InstructionHandle): Int = { val pos = lookup(bcelInstruction).pc.get bcelMethod.getLineNumberTable.getSourceLine(pos) } private def argReferences(index: Int, bcelArgs: List[org.apache.bcel.generic.Type]): Map[Int, Reference] = if (bcelArgs == Nil) Map() else bcelArgs.head match { case basicType: org.apache.bcel.generic.BasicType => argReferences(index + basicType.getSize, bcelArgs.tail) case referenceType: org.apache.bcel.generic.ReferenceType => argReferences(index + 1, bcelArgs.tail) + (index -> Reference(referenceType,LocalReferenceNode(this, index))) } val inputReferences: Map[Int, Reference] = if (bcelMethod.isStatic) argReferences(0, bcelMethod.getArgumentTypes.toList) else argReferences(1, bcelMethod.getArgumentTypes.toList) + (0 -> Reference(clazz.classType, LocalReferenceNode(this, 0))) def argumentTypes: List[Type] = { if (bcelMethod.isStatic) bcelMethod.getArgumentTypes.toList else clazz.classType :: bcelMethod.getArgumentTypes.toList } def maxLocals: Int = bcelMethod.getCode.getMaxLocals def name: String = bcelMethod.getName override def toString: String = id def callGraph = CallGraph(this) lazy val nonRecursiveSummary = NonRecursiveSummaryInformation(this) lazy val summary = SummaryInformation(Frame(this), controlFlowGraph, _.successors, _.predecessors) def interpret { println(summary) print } def print { println("." + toString + " " + inputReferences) instructions.foreach(instruction => instruction.print) } override def equals(obj: scala.Any): Boolean = { obj match { case m: Method if m.id == id => true case _ => false } } override def hashCode(): Int = id.hashCode def signature = bcelMethod.getSignature }
oliverhaase/sai
src/sai/bytecode/Method.scala
Scala
mit
3,932
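Editor's note: a standalone illustration of the local-slot arithmetic behind argReferences above: long/double arguments occupy two JVM slots, references one, and instance methods reserve slot 0 for `this`. The names below are hypothetical.

object SlotIndexSketch {
  sealed trait ArgKind { def size: Int }
  case object RefArg extends ArgKind { def size = 1 }    // reference types
  case object WideArg extends ArgKind { def size = 2 }   // long / double
  case object NarrowArg extends ArgKind { def size = 1 } // other primitives

  // Returns the local-variable index of each argument.
  def slots(isStatic: Boolean, args: List[ArgKind]): List[Int] = {
    val start = if (isStatic) 0 else 1 // slot 0 is `this` for instance methods
    args.scanLeft(start)(_ + _.size).init
  }

  def main(args: Array[String]): Unit = {
    // instance method (long, String): this = 0, long = slots 1-2, String = slot 3
    assert(slots(isStatic = false, List(WideArg, RefArg)) == List(1, 3))
  }
}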
package com.twitter.finatra.json.tests import com.google.inject.spi.Message import com.google.inject.{ConfigurationException, Injector, Key} import com.twitter.finatra.json.FinatraObjectMapper import com.twitter.finatra.json.internal.caseclass.exceptions.JsonInjectException import com.twitter.finatra.json.tests.internal._ import com.twitter.inject.{Mockito, Test} import java.util import net.codingwell.scalaguice.typeLiteral class GuiceInjectableValuesFinatraObjectMapperTest extends Test with Mockito { val injector = mock[Injector] /* Class under test */ val mapper = FinatraObjectMapper.create(injector) override def afterEach() = { super.afterEach() reset(injector) } "@Inject value should work when field not sent in json" in { val keyString = Key.get(classOf[String]) injector.getInstance(keyString) returns "Foo" assert(parse[CaseClassInjectString]( """ { } """) == CaseClassInjectString("Foo")) } "@Inject Guice Option[String] into case class Option[String]" in { val key = Key.get(typeLiteral[Option[String]]) injector.getInstance(key) returns Some("Foo") assert(parse[CaseClassInjectOptionString]( """ { } """) == CaseClassInjectOptionString(Some("Foo"))) } "@Inject value should use default when field not sent in json" in { assert(parse[CaseClassInjectStringWithDefault]( """ { } """) == CaseClassInjectStringWithDefault("DefaultHello")) } "@Inject value should use default when field sent in json" in { assert(parse[CaseClassInjectStringWithDefault]( """ { "string": "123" } """) == CaseClassInjectStringWithDefault("DefaultHello")) } "@Inject value should use None assumed default when field sent in json" in { assert(parse[CaseClassInjectOptionString]( """ { "string": "123" } """) == CaseClassInjectOptionString(None)) } "@Inject value takes precedence over value in json" in { val keyString = Key.get(classOf[String]) injector.getInstance(keyString) returns "Foo" assert(parse[CaseClassInjectString](""" { "string": "123" }""") == CaseClassInjectString("Foo")) } "@Inject ConfigurationException" in { val keyString = Key.get(classOf[String]) injector.getInstance(keyString) throws new ConfigurationException(new util.LinkedList[Message]()) intercept[JsonInjectException] { parse[CaseClassInjectString](""" { "string": "123" }""") } } "Too many injectable annotations" in { intercept[AssertionError] { parse[CaseClassTooManyInjectableAnnotations]("""{}""") } } "Too many binding annotations" in { intercept[Exception] { parse[CaseClassTooManyBindingAnnotations]("""{}""") } } private def parse[T: Manifest](string: String): T = { mapper.parse[T](string) } }
syamantm/finatra
jackson/src/test/scala/com/twitter/finatra/json/tests/GuiceInjectableValuesFinatraObjectMapperTest.scala
Scala
apache-2.0
2,939
/* * Copyright 2001-2011 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.suiteprop import org.scalatest._ import prop.Tables trait SuiteExamples extends Tables { type FixtureServices val suite: Suite with FixtureServices val fixtureSuite: fixture.Suite with FixtureServices val funSuite: FunSuite with FixtureServices val fixtureFunSuite: fixture.FunSuite with FixtureServices val funSpec: FunSpec with FixtureServices val nestedFunSpec: FunSpec with FixtureServices val deeplyNestedFunSpec: FunSpec with FixtureServices val fixtureFunSpec: fixture.FunSpec with FixtureServices val nestedFixtureFunSpec: fixture.FunSpec with FixtureServices val deeplyNestedFixtureFunSpec: fixture.FunSpec with FixtureServices val pathFunSpec: path.FunSpec with FixtureServices val nestedPathFunSpec: path.FunSpec with FixtureServices val deeplyNestedPathFunSpec: path.FunSpec with FixtureServices val wordSpec: WordSpec with FixtureServices val nestedWordSpec: WordSpec with FixtureServices val deeplyNestedWordSpec: WordSpec with FixtureServices val fixtureWordSpec: fixture.WordSpec with FixtureServices val nestedFixtureWordSpec: fixture.WordSpec with FixtureServices val deeplyNestedFixtureWordSpec: fixture.WordSpec with FixtureServices val nestedWordSpecWithMust: WordSpec with FixtureServices val deeplyNestedWordSpecWithMust: WordSpec with FixtureServices val nestedFixtureWordSpecWithMust: fixture.WordSpec with FixtureServices val deeplyNestedFixtureWordSpecWithMust: fixture.WordSpec with FixtureServices val nestedWordSpecWithCan: WordSpec with FixtureServices val deeplyNestedWordSpecWithCan: WordSpec with FixtureServices val nestedFixtureWordSpecWithCan: fixture.WordSpec with FixtureServices val deeplyNestedFixtureWordSpecWithCan: fixture.WordSpec with FixtureServices val flatSpec: FlatSpec with FixtureServices val subjectFlatSpec: FlatSpec with FixtureServices val shorthandSubjectFlatSpec: FlatSpec with FixtureServices val fixtureFlatSpec: fixture.FlatSpec with FixtureServices val subjectFixtureFlatSpec: fixture.FlatSpec with FixtureServices val shorthandSubjectFixtureFlatSpec: fixture.FlatSpec with FixtureServices val flatSpecWithMust: FlatSpec with FixtureServices val subjectFlatSpecWithMust: FlatSpec with FixtureServices val shorthandSubjectFlatSpecWithMust: FlatSpec with FixtureServices val fixtureFlatSpecWithMust: fixture.FlatSpec with FixtureServices val subjectFixtureFlatSpecWithMust: fixture.FlatSpec with FixtureServices val shorthandSubjectFixtureFlatSpecWithMust: fixture.FlatSpec with FixtureServices val flatSpecWithCan: FlatSpec with FixtureServices val subjectFlatSpecWithCan: FlatSpec with FixtureServices val shorthandSubjectFlatSpecWithCan: FlatSpec with FixtureServices val fixtureFlatSpecWithCan: fixture.FlatSpec with FixtureServices val subjectFixtureFlatSpecWithCan: fixture.FlatSpec with FixtureServices val shorthandSubjectFixtureFlatSpecWithCan: fixture.FlatSpec with FixtureServices val freeSpec: FreeSpec with FixtureServices val nestedFreeSpec: FreeSpec with FixtureServices val 
deeplyNestedFreeSpec: FreeSpec with FixtureServices val fixtureFreeSpec: fixture.FreeSpec with FixtureServices val nestedFixtureFreeSpec: fixture.FreeSpec with FixtureServices val deeplyNestedFixtureFreeSpec: fixture.FreeSpec with FixtureServices val pathFreeSpec: path.FreeSpec with FixtureServices val nestedPathFreeSpec: path.FreeSpec with FixtureServices val deeplyNestedPathFreeSpec: path.FreeSpec with FixtureServices val featureSpec: FeatureSpec with FixtureServices val nestedFeatureSpec: FeatureSpec with FixtureServices val fixtureFeatureSpec: fixture.FeatureSpec with FixtureServices val nestedFixtureFeatureSpec: fixture.FeatureSpec with FixtureServices val propSpec: PropSpec with FixtureServices val fixturePropSpec: fixture.PropSpec with FixtureServices def examples = Table( "suite", suite, fixtureSuite, funSuite, fixtureFunSuite, funSpec, nestedFunSpec, deeplyNestedFunSpec, fixtureFunSpec, nestedFixtureFunSpec, deeplyNestedFixtureFunSpec, pathFunSpec, nestedPathFunSpec, deeplyNestedPathFunSpec, wordSpec, nestedWordSpec, deeplyNestedWordSpec, fixtureWordSpec, nestedFixtureWordSpec, deeplyNestedFixtureWordSpec, nestedWordSpecWithMust, deeplyNestedWordSpecWithMust, nestedFixtureWordSpecWithMust, deeplyNestedFixtureWordSpecWithMust, nestedWordSpecWithCan, deeplyNestedWordSpecWithCan, nestedFixtureWordSpecWithCan, deeplyNestedFixtureWordSpecWithCan, flatSpec, subjectFlatSpec, shorthandSubjectFlatSpec, fixtureFlatSpec, subjectFixtureFlatSpec, shorthandSubjectFixtureFlatSpec, flatSpecWithMust, subjectFlatSpecWithMust, shorthandSubjectFlatSpecWithMust, fixtureFlatSpecWithMust, subjectFixtureFlatSpecWithMust, shorthandSubjectFixtureFlatSpecWithMust, flatSpecWithCan, subjectFlatSpecWithCan, shorthandSubjectFlatSpecWithCan, fixtureFlatSpecWithCan, subjectFixtureFlatSpecWithCan, shorthandSubjectFixtureFlatSpecWithCan, freeSpec, nestedFreeSpec, deeplyNestedFreeSpec, fixtureFreeSpec, nestedFixtureFreeSpec, deeplyNestedFixtureFreeSpec, pathFreeSpec, nestedPathFreeSpec, deeplyNestedPathFreeSpec, featureSpec, nestedFeatureSpec, fixtureFeatureSpec, nestedFixtureFeatureSpec, propSpec, fixturePropSpec ) }
hubertp/scalatest
src/test/scala/org/scalatest/suiteprop/SuiteExamples.scala
Scala
apache-2.0
6,240
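Editor's note: a small sketch of how such a Table is typically consumed with ScalaTest's table-driven property checks; the one-column table here stands in for the suite table above.

import org.scalatest.prop.TableDrivenPropertyChecks._

object TableSketch {
  def main(args: Array[String]): Unit = {
    val numbers = Table("n", 1, 2, 3)       // same Table(...) factory as above
    forAll(numbers) { n => assert(n > 0) }  // each row becomes one checked case
  }
}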
import scala.reflect.runtime.universe._ import scala.reflect.ClassManifest object Test extends App { def typeTagIsnotClassManifest[T: TypeTag] = { println(implicitly[ClassManifest[T]]) } typeTagIsnotClassManifest[Int] typeTagIsnotClassManifest[String] typeTagIsnotClassManifest[Array[Int]] }
scala/scala
test/files/neg/interop_typetags_arenot_classmanifests.scala
Scala
apache-2.0
308
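Editor's note: the record above is a negative test, asserting that a TypeTag does not provide a ClassManifest. A compiling counterpart uses ClassTag, the modern replacement for ClassManifest:

import scala.reflect.ClassTag

object ClassTagWorks {
  def describe[T: ClassTag]: String = implicitly[ClassTag[T]].toString

  def main(args: Array[String]): Unit = {
    println(describe[Int])        // prints the runtime class tag for Int
    println(describe[Array[Int]]) // array tags are supported too
  }
}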
/* Copyright (c) 2016, Rice University Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Rice University nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.apache.spark.rdd.cl.tests import java.util.LinkedList import com.amd.aparapi.internal.writer.ScalaArrayParameter import com.amd.aparapi.internal.model.Tuple2ClassModel import com.amd.aparapi.internal.model.ClassModel import com.amd.aparapi.internal.model.HardCodedClassModels import com.amd.aparapi.internal.model.SparseVectorClassModel import org.apache.spark.rdd.cl.SyncCodeGenTest import org.apache.spark.rdd.cl.CodeGenTest import org.apache.spark.rdd.cl.CodeGenTests import org.apache.spark.rdd.cl.CodeGenUtil import org.apache.spark.mllib.linalg.SparseVector import org.apache.spark.rdd.cl.SparseVectorInputBufferWrapperConfig object SparseVectorInputTest extends SyncCodeGenTest[SparseVector, (Int, Double)] { def getExpectedException() : String = { return null } def getExpectedKernel() : String = { getExpectedKernelHelper(getClass) } def getExpectedNumInputs : Int = { 1 } def init() : HardCodedClassModels = { val models = new HardCodedClassModels() val sparseVectorModel : SparseVectorClassModel = SparseVectorClassModel.create() models.addClassModelFor(classOf[SparseVector], sparseVectorModel) val outputClassType1Name = CodeGenUtil.cleanClassName("I") val outputClassType2Name = CodeGenUtil.cleanClassName("D") val tuple2ClassModel : Tuple2ClassModel = Tuple2ClassModel.create( outputClassType1Name, outputClassType2Name, true) models.addClassModelFor(classOf[Tuple2[_, _]], tuple2ClassModel) models } def complete(params : LinkedList[ScalaArrayParameter]) { params.get(1).addTypeParameter("I", false) params.get(1).addTypeParameter("D", false) } def getFunction() : Function1[SparseVector, (Int, Double)] = { new Function[SparseVector, (Int, Double)] { override def apply(in : SparseVector) : (Int, Double) = { var indexSum = 0 var valueSum = 0.0 var i = 0 while (i < in.size) { indexSum += in.indices(i) valueSum += in.values(i) i += 1 } (indexSum, valueSum) } } } }
agrippa/spark-swat
swat/src/test/scala/org/apache/spark/rdd/cl/tests/SparseVectorInputTest.scala
Scala
bsd-3-clause
3,599
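Editor's note: a sketch of what the kernel function above computes on a concrete MLlib SparseVector. Because the loop runs up to in.size while indexing indices/values positionally, this sketch uses a vector whose size equals its number of stored entries so it stays in bounds on the JVM.

import org.apache.spark.mllib.linalg.SparseVector
import org.apache.spark.rdd.cl.tests.SparseVectorInputTest

object SparseVectorFunctionSketch {
  def main(args: Array[String]): Unit = {
    val v = new SparseVector(2, Array(0, 1), Array(2.0, 4.0))
    val f = SparseVectorInputTest.getFunction()
    assert(f(v) == (1, 6.0)) // indices sum to 0 + 1, values to 2.0 + 4.0
  }
}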
package cc.emberwalker.artemis import org.apache.logging.log4j.LogManager import net.minecraftforge.fml.common.event.FMLPreInitializationEvent import net.minecraftforge.fml.common.Mod import net.minecraftforge.fml.common.Mod.EventHandler import cc.emberwalker.artemis.lib.Config import cc.emberwalker.artemis.compat.CompatController import cc.emberwalker.artemis.util.{ModMapper, ExitLogThread} /** * The stdout-hunter mod. * * @author Arkan <[email protected]> */ @Mod(modid = "Artemis", name = "Artemis", version = "${version}", modLanguage = "scala", dependencies = "before:*", acceptableRemoteVersions="*") object Artemis { val logger = LogManager.getLogger("Artemis/Core") val outLogger = LogManager.getLogger("Artemis/STDOUT") val errLogger = LogManager.getLogger("Artemis/STDERR") @EventHandler def preInit(evt:FMLPreInitializationEvent) { logger.info("Artemis ${version} loading.") logger.info("Loading configuration.") Config.loadConfig(evt.getSuggestedConfigurationFile) logger.info("Inserting TracingPrintStream.") System.setOut(new TracingPrintStream(outLogger, System.out)) System.setErr(new TracingPrintStream(errLogger, System.err)) logger.info("Initialising plugins.") CompatController.loadCompatMods() if (Config.createBlamefile) { logger.info("Injecting JVM shutdown hook thread for blamefile.") Runtime.getRuntime.addShutdownHook(new ExitLogThread) } if (Config.mapModIds) ModMapper.init() logger.info("Setup completed.") } }
Emberwalker/Artemis
src/main/scala/cc/emberwalker/artemis/Artemis.scala
Scala
mit
1,532
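Editor's note: TracingPrintStream's source is not part of this record; the class below is a hypothetical minimal equivalent showing the delegation idea, forwarding println calls to a log4j logger while remaining a valid PrintStream.

import java.io.PrintStream
import org.apache.logging.log4j.Logger

class TracingPrintStreamSketch(logger: Logger, delegate: PrintStream)
  extends PrintStream(delegate) {
  // Route line-based prints through the logger; other writes fall through to the delegate.
  override def println(s: String): Unit = logger.info(s)
}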
package be.objectify.batch.concurrent

import akka.actor._

import scala.concurrent.Future

/**
 * An actor that requests work from a master and processes it asynchronously.
 *
 * @param master the master actor coordinating work distribution
 */
abstract class Consumer(master: ActorSelection) extends Actor with ActorLogging {

  import Protocol._
  import context._

  def doWork(eventListener: ActorRef, key: Any, work: Any): Future[WorkComplete]

  def onCustomMessage(message: Any) = unhandled(message)

  override def preStart() = master ! ConsumerCreated(self)

  def active: Receive = {
    case WorkComplete(key, result, successful) =>
      log.debug("[Active consumer] Work complete, informing master and becoming idle")
      master ! WorkIsDone(key, self, successful)
      master ! ConsumerRequestsWork(self)
      context.become(idle)
    case WorkIsReady =>
      log.error("[Active consumer] Work is ready, but I'm already working. Ignoring request.")
    case WorkToBeDone(_, _) =>
      log.error("[Active consumer] I've been given work, but I'm already busy. This is not good.")
    case NoWorkToBeDone =>
      log.debug("[Active consumer] No work to be done. Ignoring request.")
    case msg =>
      log.debug("[Active consumer] Received a custom message [{}]", msg)
      onCustomMessage(msg)
  }

  def idle: Receive = {
    case WorkIsReady =>
      log.debug("[Idle consumer] Work is ready, requesting from master")
      master ! ConsumerRequestsWork(self)
    case WorkToBeDone(key, work) =>
      log.debug("[Idle consumer - {}] Got work [{}], becoming active", key, work)
      context.become(active)
      import akka.pattern.pipe
      doWork(sender(), key, work) pipeTo self
    case NoWorkToBeDone =>
      log.debug("[Idle consumer] Requested work, but none available")
    case msg =>
      log.debug("[Idle consumer] Received a custom message [{}]", msg)
      onCustomMessage(msg)
  }

  def receive = idle
}
schaloner/akka-batch
src/main/scala/be/objectify/batch/concurrent/Consumer.scala
Scala
apache-2.0
1,824
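Editor's note: a hypothetical concrete consumer showing how doWork is meant to be implemented; the WorkComplete constructor arguments follow the (key, result, successful) pattern matched in `active` above.

import akka.actor.{ActorRef, ActorSelection}
import be.objectify.batch.concurrent.{Consumer, Protocol}

import scala.concurrent.Future

class PrintingConsumer(master: ActorSelection) extends Consumer(master) {

  import Protocol._
  import context.dispatcher

  override def doWork(eventListener: ActorRef, key: Any, work: Any): Future[WorkComplete] =
    Future {
      println(s"processing $key -> $work") // stand-in for real work
      WorkComplete(key, work, true)
    }
}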
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.metrics.sink import java.util.Properties import java.util.concurrent.TimeUnit import javax.servlet.http.HttpServletRequest import com.codahale.metrics.MetricRegistry import com.codahale.metrics.json.MetricsModule import com.fasterxml.jackson.databind.ObjectMapper import org.eclipse.jetty.servlet.ServletContextHandler import org.apache.spark.SparkConf import org.apache.spark.ui.JettyUtils._ private[spark] class MetricsServlet( val property: Properties, val registry: MetricRegistry) extends Sink { val SERVLET_KEY_PATH = "path" val SERVLET_KEY_SAMPLE = "sample" val SERVLET_DEFAULT_SAMPLE = false val servletPath = property.getProperty(SERVLET_KEY_PATH) val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean) .getOrElse(SERVLET_DEFAULT_SAMPLE) val mapper = new ObjectMapper().registerModule( new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample)) def getHandlers(conf: SparkConf): Array[ServletContextHandler] = { Array[ServletContextHandler]( createServletHandler(servletPath, new ServletParams(request => getMetricsSnapshot(request), "text/json"), conf) ) } def getMetricsSnapshot(request: HttpServletRequest): String = { mapper.writeValueAsString(registry) } override def start(): Unit = { } override def stop(): Unit = { } override def report(): Unit = { } }
hvanhovell/spark
core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
Scala
apache-2.0
2,230
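Editor's note: a minimal configuration sketch for the sink above; the property keys come from SERVLET_KEY_PATH and SERVLET_KEY_SAMPLE, and the snippet ignores the private[spark] modifier (real callers live inside the org.apache.spark package).

import java.util.Properties
import com.codahale.metrics.MetricRegistry
import org.apache.spark.metrics.sink.MetricsServlet

object MetricsServletSketch {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.setProperty("path", "/metrics/json")
    props.setProperty("sample", "true")
    val sink = new MetricsServlet(props, new MetricRegistry())
    println(sink.servletPath)       // /metrics/json
    println(sink.servletShowSample) // true
  }
}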
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openwhisk.core.invoker import org.apache.openwhisk.common.TransactionId import org.apache.openwhisk.core.containerpool.Container import org.apache.openwhisk.core.containerpool.logging.LogStore import org.apache.openwhisk.core.entity.{ActivationLogs, ExecutableWhiskAction, Identity, WhiskActivation} import org.apache.openwhisk.core.invoker.Invoker.LogsCollector import scala.concurrent.Future class LogStoreCollector(store: LogStore) extends LogsCollector { override def logsToBeCollected(action: ExecutableWhiskAction): Boolean = super.logsToBeCollected(action) && !store.logCollectionOutOfBand override def apply(transid: TransactionId, user: Identity, activation: WhiskActivation, container: Container, action: ExecutableWhiskAction): Future[ActivationLogs] = store.collectLogs(transid, user, activation, container, action) }
jeremiaswerner/openwhisk
core/invoker/src/main/scala/org/apache/openwhisk/core/invoker/LogStoreCollector.scala
Scala
apache-2.0
1,753
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.plans.logical

import org.apache.spark.sql.catalyst.analysis.{NamedRelation, UnresolvedException}
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression, Unevaluable}
import org.apache.spark.sql.catalyst.plans.DescribeTableSchema
import org.apache.spark.sql.connector.catalog._
import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, ColumnChange}
import org.apache.spark.sql.connector.expressions.Transform
import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType, StructType}

/**
 * Base trait for DataSourceV2 write commands.
 */
trait V2WriteCommand extends Command {
  def table: NamedRelation
  def query: LogicalPlan

  override def children: Seq[LogicalPlan] = Seq(query)

  override lazy val resolved: Boolean = outputResolved

  def outputResolved: Boolean = {
    // If the table doesn't require schema match, we don't need to resolve the output columns.
    table.skipSchemaResolution || {
      table.resolved && query.resolved && query.output.size == table.output.size &&
        query.output.zip(table.output).forall {
          case (inAttr, outAttr) =>
            // names and types must match, nullability must be compatible
            inAttr.name == outAttr.name &&
              DataType.equalsIgnoreCompatibleNullability(outAttr.dataType, inAttr.dataType) &&
              (outAttr.nullable || !inAttr.nullable)
        }
    }
  }
}

/**
 * Append data to an existing table.
 */
case class AppendData(
    table: NamedRelation,
    query: LogicalPlan,
    writeOptions: Map[String, String],
    isByName: Boolean) extends V2WriteCommand

object AppendData {
  def byName(
      table: NamedRelation,
      df: LogicalPlan,
      writeOptions: Map[String, String] = Map.empty): AppendData = {
    new AppendData(table, df, writeOptions, isByName = true)
  }

  def byPosition(
      table: NamedRelation,
      query: LogicalPlan,
      writeOptions: Map[String, String] = Map.empty): AppendData = {
    new AppendData(table, query, writeOptions, isByName = false)
  }
}

/**
 * Overwrite data matching a filter in an existing table.
 */
case class OverwriteByExpression(
    table: NamedRelation,
    deleteExpr: Expression,
    query: LogicalPlan,
    writeOptions: Map[String, String],
    isByName: Boolean) extends V2WriteCommand {
  override lazy val resolved: Boolean = outputResolved && deleteExpr.resolved
}

object OverwriteByExpression {
  def byName(
      table: NamedRelation,
      df: LogicalPlan,
      deleteExpr: Expression,
      writeOptions: Map[String, String] = Map.empty): OverwriteByExpression = {
    OverwriteByExpression(table, deleteExpr, df, writeOptions, isByName = true)
  }

  def byPosition(
      table: NamedRelation,
      query: LogicalPlan,
      deleteExpr: Expression,
      writeOptions: Map[String, String] = Map.empty): OverwriteByExpression = {
    OverwriteByExpression(table, deleteExpr, query, writeOptions, isByName = false)
  }
}

/**
 * Dynamically overwrite partitions in an existing table.
 */
case class OverwritePartitionsDynamic(
    table: NamedRelation,
    query: LogicalPlan,
    writeOptions: Map[String, String],
    isByName: Boolean) extends V2WriteCommand

object OverwritePartitionsDynamic {
  def byName(
      table: NamedRelation,
      df: LogicalPlan,
      writeOptions: Map[String, String] = Map.empty): OverwritePartitionsDynamic = {
    OverwritePartitionsDynamic(table, df, writeOptions, isByName = true)
  }

  def byPosition(
      table: NamedRelation,
      query: LogicalPlan,
      writeOptions: Map[String, String] = Map.empty): OverwritePartitionsDynamic = {
    OverwritePartitionsDynamic(table, query, writeOptions, isByName = false)
  }
}

/** A trait used for logical plan nodes that create or replace V2 table definitions. */
trait V2CreateTablePlan extends LogicalPlan {
  def tableName: Identifier
  def partitioning: Seq[Transform]
  def tableSchema: StructType

  /**
   * Creates a copy of this node with the new partitioning transforms. This method is used to
   * rewrite the partition transforms normalized according to the table schema.
   */
  def withPartitioning(rewritten: Seq[Transform]): V2CreateTablePlan
}

/**
 * Create a new table with a v2 catalog.
 */
case class CreateV2Table(
    catalog: TableCatalog,
    tableName: Identifier,
    tableSchema: StructType,
    partitioning: Seq[Transform],
    properties: Map[String, String],
    ignoreIfExists: Boolean) extends Command with V2CreateTablePlan {
  override def withPartitioning(rewritten: Seq[Transform]): V2CreateTablePlan = {
    this.copy(partitioning = rewritten)
  }
}

/**
 * Create a new table from a select query with a v2 catalog.
 */
case class CreateTableAsSelect(
    catalog: TableCatalog,
    tableName: Identifier,
    partitioning: Seq[Transform],
    query: LogicalPlan,
    properties: Map[String, String],
    writeOptions: Map[String, String],
    ignoreIfExists: Boolean) extends Command with V2CreateTablePlan {

  override def tableSchema: StructType = query.schema
  override def children: Seq[LogicalPlan] = Seq(query)

  override lazy val resolved: Boolean = childrenResolved && {
    // the table schema is created from the query schema, so the only resolution needed is to check
    // that the columns referenced by the table's partitioning exist in the query schema
    val references = partitioning.flatMap(_.references).toSet
    references.map(_.fieldNames).forall(query.schema.findNestedField(_).isDefined)
  }

  override def withPartitioning(rewritten: Seq[Transform]): V2CreateTablePlan = {
    this.copy(partitioning = rewritten)
  }
}

/**
 * Replace a table with a v2 catalog.
 *
 * If the table does not exist, and orCreate is true, then it will be created.
 * If the table does not exist, and orCreate is false, then an exception will be thrown.
 *
 * The persisted table will have no contents as a result of this operation.
 */
case class ReplaceTable(
    catalog: TableCatalog,
    tableName: Identifier,
    tableSchema: StructType,
    partitioning: Seq[Transform],
    properties: Map[String, String],
    orCreate: Boolean) extends Command with V2CreateTablePlan {
  override def withPartitioning(rewritten: Seq[Transform]): V2CreateTablePlan = {
    this.copy(partitioning = rewritten)
  }
}

/**
 * Replaces a table from a select query with a v2 catalog.
 *
 * If the table does not exist, and orCreate is true, then it will be created.
 * If the table does not exist, and orCreate is false, then an exception will be thrown.
 */
case class ReplaceTableAsSelect(
    catalog: TableCatalog,
    tableName: Identifier,
    partitioning: Seq[Transform],
    query: LogicalPlan,
    properties: Map[String, String],
    writeOptions: Map[String, String],
    orCreate: Boolean) extends Command with V2CreateTablePlan {

  override def tableSchema: StructType = query.schema
  override def children: Seq[LogicalPlan] = Seq(query)

  override lazy val resolved: Boolean = childrenResolved && {
    // the table schema is created from the query schema, so the only resolution needed is to check
    // that the columns referenced by the table's partitioning exist in the query schema
    val references = partitioning.flatMap(_.references).toSet
    references.map(_.fieldNames).forall(query.schema.findNestedField(_).isDefined)
  }

  override def withPartitioning(rewritten: Seq[Transform]): V2CreateTablePlan = {
    this.copy(partitioning = rewritten)
  }
}

/**
 * The logical plan of the CREATE NAMESPACE command that works for v2 catalogs.
 */
case class CreateNamespace(
    catalog: SupportsNamespaces,
    namespace: Seq[String],
    ifNotExists: Boolean,
    properties: Map[String, String]) extends Command

/**
 * The logical plan of the DROP NAMESPACE command that works for v2 catalogs.
 */
case class DropNamespace(
    namespace: LogicalPlan,
    ifExists: Boolean,
    cascade: Boolean) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)
}

/**
 * The logical plan of the DESCRIBE NAMESPACE command that works for v2 catalogs.
 */
case class DescribeNamespace(
    namespace: LogicalPlan,
    extended: Boolean) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)

  override def output: Seq[Attribute] = Seq(
    AttributeReference("name", StringType, nullable = false,
      new MetadataBuilder().putString("comment", "name of the column").build())(),
    AttributeReference("value", StringType, nullable = true,
      new MetadataBuilder().putString("comment", "value of the column").build())())
}

/**
 * The logical plan of the ALTER (DATABASE|SCHEMA|NAMESPACE) ... SET (DBPROPERTIES|PROPERTIES)
 * command that works for v2 catalogs.
 */
case class AlterNamespaceSetProperties(
    namespace: LogicalPlan,
    properties: Map[String, String]) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)
}

/**
 * The logical plan of the ALTER (DATABASE|SCHEMA|NAMESPACE) ... SET LOCATION
 * command that works for v2 catalogs.
 */
case class AlterNamespaceSetLocation(
    namespace: LogicalPlan,
    location: String) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)
}

/**
 * The logical plan of the SHOW NAMESPACES command that works for v2 catalogs.
 */
case class ShowNamespaces(
    namespace: LogicalPlan,
    pattern: Option[String]) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)

  override val output: Seq[Attribute] = Seq(
    AttributeReference("namespace", StringType, nullable = false)())
}

/**
 * The logical plan of the DESCRIBE relation_name command that works for v2 tables.
 */
case class DescribeRelation(
    relation: LogicalPlan,
    partitionSpec: TablePartitionSpec,
    isExtended: Boolean) extends Command {
  override def children: Seq[LogicalPlan] = Seq(relation)
  override def output: Seq[Attribute] = DescribeTableSchema.describeTableAttributes()
}

/**
 * The logical plan of the DELETE FROM command that works for v2 tables.
 */
case class DeleteFromTable(
    table: LogicalPlan,
    condition: Option[Expression]) extends Command with SupportsSubquery {
  override def children: Seq[LogicalPlan] = table :: Nil
}

/**
 * The logical plan of the UPDATE TABLE command that works for v2 tables.
 */
case class UpdateTable(
    table: LogicalPlan,
    assignments: Seq[Assignment],
    condition: Option[Expression]) extends Command with SupportsSubquery {
  override def children: Seq[LogicalPlan] = table :: Nil
}

/**
 * The logical plan of the MERGE INTO command that works for v2 tables.
 */
case class MergeIntoTable(
    targetTable: LogicalPlan,
    sourceTable: LogicalPlan,
    mergeCondition: Expression,
    matchedActions: Seq[MergeAction],
    notMatchedActions: Seq[MergeAction]) extends Command with SupportsSubquery {
  override def children: Seq[LogicalPlan] = Seq(targetTable, sourceTable)
}

sealed abstract class MergeAction extends Expression with Unevaluable {
  def condition: Option[Expression]
  override def foldable: Boolean = false
  override def nullable: Boolean = false
  override def dataType: DataType = throw new UnresolvedException(this, "dataType")
  override def children: Seq[Expression] = condition.toSeq
}

case class DeleteAction(condition: Option[Expression]) extends MergeAction

case class UpdateAction(
    condition: Option[Expression],
    assignments: Seq[Assignment]) extends MergeAction {
  override def children: Seq[Expression] = condition.toSeq ++ assignments
}

case class InsertAction(
    condition: Option[Expression],
    assignments: Seq[Assignment]) extends MergeAction {
  override def children: Seq[Expression] = condition.toSeq ++ assignments
}

case class Assignment(key: Expression, value: Expression) extends Expression with Unevaluable {
  override def foldable: Boolean = false
  override def nullable: Boolean = false
  override def dataType: DataType = throw new UnresolvedException(this, "dataType")
  override def children: Seq[Expression] = key :: value :: Nil
}

/**
 * The logical plan of the DROP TABLE command that works for v2 tables.
 */
case class DropTable(
    catalog: TableCatalog,
    ident: Identifier,
    ifExists: Boolean) extends Command

/**
 * The logical plan of the ALTER TABLE command that works for v2 tables.
 */
case class AlterTable(
    catalog: TableCatalog,
    ident: Identifier,
    table: NamedRelation,
    changes: Seq[TableChange]) extends Command {

  override lazy val resolved: Boolean = table.resolved && {
    changes.forall {
      case add: AddColumn =>
        add.fieldNames match {
          case Array(_) =>
            // a top-level field can always be added
            true
          case _ =>
            // the parent field must exist
            table.schema.findNestedField(add.fieldNames.init, includeCollections = true).isDefined
        }

      case colChange: ColumnChange =>
        // the column that will be changed must exist
        table.schema.findNestedField(colChange.fieldNames, includeCollections = true).isDefined

      case _ =>
        // property changes require no resolution checks
        true
    }
  }
}

/**
 * The logical plan of the ALTER TABLE RENAME command that works for v2 tables.
 */
case class RenameTable(
    catalog: TableCatalog,
    oldIdent: Identifier,
    newIdent: Identifier) extends Command

/**
 * The logical plan of the SHOW TABLES command that works for v2 catalogs.
 */
case class ShowTables(
    namespace: LogicalPlan,
    pattern: Option[String]) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)

  override val output: Seq[Attribute] = Seq(
    AttributeReference("namespace", StringType, nullable = false)(),
    AttributeReference("tableName", StringType, nullable = false)())
}

/**
 * The logical plan of the SHOW VIEWS command that works for v1 and v2 catalogs.
 *
 * Notes: v2 catalogs do not support views API yet, the command will fall back to
 * v1 ShowViewsCommand during ResolveSessionCatalog.
 */
case class ShowViews(
    namespace: LogicalPlan,
    pattern: Option[String]) extends Command {
  override def children: Seq[LogicalPlan] = Seq(namespace)

  override val output: Seq[Attribute] = Seq(
    AttributeReference("namespace", StringType, nullable = false)(),
    AttributeReference("viewName", StringType, nullable = false)())
}

/**
 * The logical plan of the USE/USE NAMESPACE command that works for v2 catalogs.
 */
case class SetCatalogAndNamespace(
    catalogManager: CatalogManager,
    catalogName: Option[String],
    namespace: Option[Seq[String]]) extends Command

/**
 * The logical plan of the REFRESH TABLE command that works for v2 catalogs.
 */
case class RefreshTable(
    catalog: TableCatalog,
    ident: Identifier) extends Command

/**
 * The logical plan of the SHOW CURRENT NAMESPACE command that works for v2 catalogs.
 */
case class ShowCurrentNamespace(catalogManager: CatalogManager) extends Command {
  override val output: Seq[Attribute] = Seq(
    AttributeReference("catalog", StringType, nullable = false)(),
    AttributeReference("namespace", StringType, nullable = false)())
}

/**
 * The logical plan of the SHOW TBLPROPERTIES command that works for v2 catalogs.
 */
case class ShowTableProperties(
    table: LogicalPlan,
    propertyKey: Option[String]) extends Command {
  override def children: Seq[LogicalPlan] = table :: Nil

  override val output: Seq[Attribute] = Seq(
    AttributeReference("key", StringType, nullable = false)(),
    AttributeReference("value", StringType, nullable = false)())
}

/**
 * The logical plan that defines or changes the comment of a NAMESPACE for v2 catalogs.
 *
 * {{{
 *   COMMENT ON (DATABASE|SCHEMA|NAMESPACE) namespaceIdentifier IS ('text' | NULL)
 * }}}
 *
 * where the `text` is the new comment written as a string literal; or `NULL` to drop the comment.
 */
case class CommentOnNamespace(child: LogicalPlan, comment: String) extends Command {
  override def children: Seq[LogicalPlan] = child :: Nil
}

/**
 * The logical plan that defines or changes the comment of a TABLE for v2 catalogs.
 *
 * {{{
 *   COMMENT ON TABLE tableIdentifier IS ('text' | NULL)
 * }}}
 *
 * where the `text` is the new comment written as a string literal; or `NULL` to drop the comment.
 */
case class CommentOnTable(child: LogicalPlan, comment: String) extends Command {
  override def children: Seq[LogicalPlan] = child :: Nil
}

/**
 * The logical plan of the REFRESH FUNCTION command that works for v2 catalogs.
 */
case class RefreshFunction(child: LogicalPlan) extends Command {
  override def children: Seq[LogicalPlan] = child :: Nil
}

/**
 * The logical plan of the DESCRIBE FUNCTION command that works for v2 catalogs.
 */
case class DescribeFunction(child: LogicalPlan, isExtended: Boolean) extends Command {
  override def children: Seq[LogicalPlan] = child :: Nil
}

/**
 * The logical plan of the DROP FUNCTION command that works for v2 catalogs.
 */
case class DropFunction(
    child: LogicalPlan,
    ifExists: Boolean,
    isTemp: Boolean) extends Command {
  override def children: Seq[LogicalPlan] = child :: Nil
}

/**
 * The logical plan of the SHOW FUNCTIONS command that works for v2 catalogs.
 */
case class ShowFunctions(
    child: Option[LogicalPlan],
    userScope: Boolean,
    systemScope: Boolean,
    pattern: Option[String]) extends Command {
  override def children: Seq[LogicalPlan] = child.toSeq
}
dbtsai/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
Scala
apache-2.0
18,343
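The nullability clause in `outputResolved` above is the part that is easiest to get backwards, so here is a self-contained restatement of the per-column rule; `Col` is a hypothetical stand-in for Spark's `Attribute`, and plain type equality replaces `DataType.equalsIgnoreCompatibleNullability`:

// Toy model of the write-compatibility check in V2WriteCommand.outputResolved.
case class Col(name: String, dataType: String, nullable: Boolean)

def writable(in: Col, out: Col): Boolean =
  in.name == out.name &&
    in.dataType == out.dataType &&
    (out.nullable || !in.nullable) // a nullable input may only feed a nullable output column

// The only rejected nullability combination: nullable data into a non-nullable column.
assert(!writable(Col("id", "int", nullable = true), Col("id", "int", nullable = false)))
assert(writable(Col("id", "int", nullable = false), Col("id", "int", nullable = true)))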
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.lewuathe.dllib

import breeze.linalg.Vector

private[dllib] case class Instance(labelVector: Vector[Double], weight: Double,
                                   features: Vector[Double]) {
  // TODO: Multi vector blob instance
  val blob: Blob[Double] = new Blob(Array(features))
  val label: Blob[Double] = new Blob(Array(labelVector))
}
Lewuathe/neurallib
src/main/scala/com/lewuathe/dllib/Instance.scala
Scala
mit
1,196
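A minimal usage sketch for the `Instance` above, assuming nothing beyond Breeze (a `DenseVector` is a `Vector[Double]`, so it satisfies the field types):

import breeze.linalg.DenseVector

// One training example: a one-hot label, unit weight, two input features.
val example = Instance(
  labelVector = DenseVector(0.0, 1.0),
  weight = 1.0,
  features = DenseVector(0.3, 0.7))
// `example.blob` wraps the features in a single-vector Blob; `example.label` wraps the label.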
package de.mineformers.core.client.ui.view.inventory

import de.mineformers.core.client.ui.view.container.Panel
import de.mineformers.core.client.util.RenderUtils
import de.mineformers.core.inventory.Inventory
import de.mineformers.core.util.math.shape2d.Point

/**
 * PlayerInventory
 *
 * @author PaleoCrafter
 */
class PlayerInventory extends Panel {
  val inventory = Inventory.enhance(RenderUtils.mc.thePlayer.inventory)

  for (i <- 0 until 9) {
    val slot = ItemSlot.createContainer(inventory(i))
    slot.position = Point(i * 18, 0)
    add(slot)
  }
}
MineFormers/MFCore
src/main/scala/de/mineformers/core/client/ui/view/inventory/PlayerInventory.scala
Scala
mit
562
/**
 * Copyright 2017 Alessandro Simi
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.exemplary.aws

import java.net.URI

import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.internal.StaticCredentialsProvider

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits._

class AmazonDynamoDBNioGzipTest extends AbstractTest with DynamoDBOperations {

  feature("Table") {

    scenario("creation") {
      val client = createClient
      var tables = resultOf(client.listTables)
      tables.getTableNames should be (empty)
      val creationResult = createTable(name = "tableName")
      val tableDescription = creationResult.getTableDescription
      tableDescription should not be null
      tables = resultOf(client.listTables)
      tables.getTableNames should have size 1
      tables.getTableNames.asScala.head should be ("tableName")
      client.shutdown()
    }
  }

  val clientConfigWithGzip = {
    val config = new ClientConfiguration()
    config.setUseGzip(true)
    config
  }

  def createClient = new AmazonDynamoDBNioClient(
    endpoint = URI.create(server.getEndpoint),
    awsCredentialsProvider = new StaticCredentialsProvider(new BasicAWSCredentials("accessKey", "secretKey")),
    config = clientConfigWithGzip
  )(global)
}
alessandrosimi/aws-dynamodb-nio
dynamodb/src/test/scala/io/exemplary/aws/AmazonDynamoDBNioGzipTest.scala
Scala
apache-2.0
1,881
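The only non-default piece of client setup the test above exercises is gzip, which the AWS SDK leaves disabled by default; isolated, the switch is just:

import com.amazonaws.ClientConfiguration

// Ask the SDK to send Accept-Encoding: gzip and decode compressed responses.
val config = new ClientConfiguration()
config.setUseGzip(true)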
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core

import akka.actor._
import java.io.{ File, IOException }
import java.util.jar.JarFile

import org.ensime.api._
import org.ensime.config.richconfig._
import org.ensime.util.io._

class DocResolver(
  prefix: String,
  forceJavaVersion: Option[String] // for testing
)(
  implicit config: EnsimeConfig
) extends Actor
    with ActorLogging
    with DocUsecaseHandling
    with DocResolverBackCompat {

  var htmlToJar = Map.empty[String, File]
  var jarNameToJar = Map.empty[String, File]
  var docTypes = Map.empty[String, DocType]

  sealed trait DocType
  case object Javadoc extends DocType
  case object Javadoc8 extends DocType
  case object Scaladoc extends DocType

  // In javadoc docs, index.html has a comment that reads 'Generated by javadoc'
  private val JavadocComment = """Generated by javadoc (?:\(([0-9\.]+))?""".r.unanchored

  override def preStart(): Unit =
    // On initialisation, do a fast scan (< 1s for 50 jars) to determine
    // the package contents of each jar, and whether it's a javadoc or
    // scaladoc.
    for (jarFile <- config.allDocJars if jarFile.exists()) {
      try {
        val jar = new JarFile(jarFile)
        val jarFileName = jarFile.getName
        jarNameToJar += jarFileName -> jarFile
        docTypes += (jarFileName -> Scaladoc)
        val enumEntries = jar.entries()
        while (enumEntries.hasMoreElements) {
          val entry = enumEntries.nextElement()
          if (!entry.isDirectory) {
            val f = new File(entry.getName)
            val dir = f.getParent
            if (dir != null) {
              htmlToJar += entry.getName -> jarFile
            }
            // Check for javadocs
            if (entry.getName == "index.html") {
              val bytes = jar.getInputStream(entry).toByteArray
              new String(bytes) match {
                case JavadocComment(version: String) if version.startsWith("1.8") =>
                  docTypes += jarFileName -> Javadoc8
                case JavadocComment(_*) =>
                  docTypes += jarFileName -> Javadoc
                case _ =>
              }
            }
          }
        }
      } catch {
        case e: IOException =>
          // continue regardless
          log.error(e, "Failed to process doc jar: " + jarFile.getName)
      }
    }

  private def javaFqnToPath(fqn: DocFqn): String =
    if (fqn.typeName == "package") {
      fqn.pack.replace(".", "/") + "/package-summary.html"
    } else {
      fqn.pack.replace(".", "/") + "/" + fqn.typeName + ".html"
    }

  private def makeLocalUri(jar: File, sig: DocSigPair): String = {
    val jarName = jar.getName
    val docType = docTypes(jarName)
    val java = docType == Javadoc || docType == Javadoc8
    if (java) {
      val path = javaFqnToPath(sig.java.fqn)
      val anchor = sig.java.member.map { s =>
        "#" + { if (docType == Javadoc8) toJava8Anchor(s) else s }
      }.getOrElse("")
      s"$prefix/$jarName/$path$anchor"
    } else {
      val scalaSig = maybeReplaceWithUsecase(jar, sig.scala)
      scalaSigToLocalUri(prefix, jarName, scalaSig)
    }
  }

  private def guessJar(sig: DocSigPair): Option[(File, DocSigPair)] = {
    val scalafqn = scalaFqnToPath(sig.scala.fqn)
    val javafqn = javaFqnToPath(sig.java.fqn)

    val scala = htmlToJar.get(scalafqn).map((_, sig))
    val scala2 = scala.orElse(
      htmlToJar
        .get(scalafqn.replace("$.html", ".html"))
        .map({ file =>
          // Documentation for the Object doesn't exist but documentation for the Class does
          val typeName = sig.scala.fqn.typeName.replaceFirst("\\$$", "")
          val sigOfClass = sig.copy(
            scala = sig.scala.copy(fqn = sig.scala.fqn.copy(typeName = typeName))
          )
          (file, sigOfClass)
        })
    )
    scala2.orElse(htmlToJar.get(javafqn).map((_, sig)))
  }

  private def resolveLocalUri(sig: DocSigPair): Option[String] =
    guessJar(sig) match {
      case Some((jar, sig)) =>
        Some(makeLocalUri(jar, sig))
      case _ =>
        log.debug(s"Failed to resolve doc jar for: $sig")
        None
    }

  // Javadoc 8 changed the anchor format to remove illegal
  // url characters: parens, commas, brackets.
  // See https://bugs.eclipse.org/bugs/show_bug.cgi?id=432056
  // and https://bugs.openjdk.java.net/browse/JDK-8025633
  private val Java8Chars = """(?:,| |\(|\)|\[\])""".r
  private def toJava8Anchor(anchor: String): String =
    Java8Chars.replaceAllIn(anchor, { m =>
      anchor(m.start) match {
        case ',' => "-"
        case '(' => "-"
        case ')' => "-"
        case '[' => ":A"
        case ' ' => ""
      }
    })

  private def toAndroidAnchor(anchor: String): String = anchor.replace(",", ", ")

  private def resolveWellKnownUri(sig: DocSigPair): Option[String] =
    if (sig.java.fqn.javaStdLib) {
      val path = javaFqnToPath(sig.java.fqn)
      val rawVersion = forceJavaVersion.getOrElse(scala.util.Properties.javaVersion)
      val version =
        if (rawVersion.startsWith("1.8")) "8"
        else if (rawVersion.startsWith("1.7")) "7"
        else "6"
      val anchor = sig.java.member.map { m =>
        "#" + { if (version == "8") toJava8Anchor(m) else m }
      }.getOrElse("")
      Some(s"http://docs.oracle.com/javase/$version/docs/api/$path$anchor")
    } else if (sig.java.fqn.androidStdLib) {
      val path = javaFqnToPath(sig.java.fqn)
      val anchor = sig.java.member.map { m => "#" + toAndroidAnchor(m) }.getOrElse("")
      Some(s"http://developer.android.com/reference/$path$anchor")
    } else None

  def resolve(sig: DocSigPair): Option[String] =
    resolveLocalUri(sig) orElse resolveWellKnownUri(sig)

  // for java stuff, really
  def resolve(sig: DocSig): Option[String] = resolve(DocSigPair(sig, sig))

  def receive: Receive = {
    case p: DocSigPair =>
      val response = resolve(p) match {
        case Some(path) => StringResponse(path)
        case None       => FalseResponse
      }
      sender() ! response
  }
}

object DocResolver {
  def apply(
    prefix: String = "docs",
    java: Option[String] = None
  )(
    implicit config: EnsimeConfig
  ): Props = Props(classOf[DocResolver], prefix, java, config)
}
yyadavalli/ensime-server
core/src/main/scala/org/ensime/core/DocResolver.scala
Scala
gpl-3.0
6,426
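The Javadoc 8 anchor rewrite in `toJava8Anchor` above is worth seeing in isolation; this standalone copy of the same logic shows how a pre-8 anchor maps onto the dash-separated Java 8 form:

val Java8Chars = """(?:,| |\(|\)|\[\])""".r

def toJava8Anchor(anchor: String): String =
  Java8Chars.replaceAllIn(anchor, m => anchor(m.start) match {
    case ',' => "-"
    case '(' => "-"
    case ')' => "-"
    case '[' => ":A" // the two-character "[]" token collapses to ":A"
    case ' ' => ""
  })

println(toJava8Anchor("wait(long, int)")) // prints: wait-long-int-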
import io.prediction.controller.P2LAlgorithm
import io.prediction.controller.Params

import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.NaiveBayes
import org.apache.spark.mllib.classification.NaiveBayesModel
import org.apache.spark.mllib.linalg.Vector

import com.github.fommil.netlib.F2jBLAS

import scala.math._

/** Define parameters for Supervised Learning Model. We are
  * using a Naive Bayes classifier, which gives us only one
  * hyperparameter in this stage.
  */
case class NBAlgorithmParams(lambda: Double) extends Params

/** Define SupervisedAlgorithm class. */
class NBAlgorithm(
  val ap: NBAlgorithmParams
) extends P2LAlgorithm[PreparedData, NBModel, Query, PredictedResult] {

  /** Train your model. */
  def train(sc: SparkContext, pd: PreparedData): NBModel = {
    // Fit a Naive Bayes model using the prepared data.
    val nb: NaiveBayesModel = NaiveBayes.train(pd.transformedData, ap.lambda)

    new NBModel(
      tfIdf = pd.tfIdf,
      categoryMap = pd.categoryMap,
      nb = nb)
  }

  /** Prediction method for trained model. */
  def predict(model: NBModel, query: Query): PredictedResult = {
    model.predict(query.text)
  }
}

class NBModel(
  val tfIdf: TFIDFModel,
  val categoryMap: Map[Double, String],
  val nb: NaiveBayesModel
) extends Serializable {

  private def innerProduct(x: Array[Double], y: Array[Double]): Double = {
    x.zip(y).map(e => e._1 * e._2).sum
  }

  val normalize = (u: Array[Double]) => {
    val uSum = u.sum
    u.map(e => e / uSum)
  }

  private val scoreArray = nb.pi.zip(nb.theta)

  /** Given a document string, return a vector of corresponding
    * class membership probabilities.
    * Helper function used to normalize probability scores.
    * Returns an object of type Array[Double]
    */
  private def getScores(doc: String): Array[Double] = {
    // Vectorize query
    val x: Vector = tfIdf.transform(doc)

    val z = scoreArray
      .map(e => innerProduct(e._2, x.toArray) + e._1)

    normalize((0 until z.size).map(k => exp(z(k) - z.max)).toArray)
  }

  /** Implement predict method for our model using
    * the prediction rule given in tutorial.
    */
  def predict(doc: String): PredictedResult = {
    val x: Array[Double] = getScores(doc)
    val y: (Double, Double) = (nb.labels zip x).maxBy(_._2)
    new PredictedResult(categoryMap.getOrElse(y._1, ""), y._2)
  }
}
gongsy945/pio-engine-text-classification-heroku
src/main/scala/NBAlgorithm.scala
Scala
apache-2.0
2,400
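`getScores` above computes per-class log-scores z_k = theta_k . x + log pi_k and then normalises them; shifting by the maximum before exponentiating is the standard overflow guard. A self-contained version of just that normalisation step:

import scala.math.exp

def posterior(logScores: Array[Double]): Array[Double] = {
  val m = logScores.max
  val unnorm = logScores.map(z => exp(z - m)) // shift so the largest exponent is 0
  val total = unnorm.sum
  unnorm.map(_ / total)
}

posterior(Array(1000.0, 1001.0)) // ~ Array(0.269, 0.731) instead of NaN from exp overflow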
package com.atomist.source

import org.scalatest.{FlatSpec, Matchers}

class ByteArrayFileArtifactTest extends FlatSpec with Matchers {

  "ByteArrayFileArtifact" should "correctly parse full constructor" in {
    val name = "filename"
    val pathElements = Seq("com", "atomist")
    val content = "".getBytes
    val mode = 100755
    val sfa = ByteArrayFileArtifact(name, pathElements, content, mode, None)
    sfa.name shouldEqual name
    sfa.pathElements.seq shouldEqual pathElements
  }

  it should "correctly parse full path with apply" in {
    val name = "filename"
    val pathElements = Seq("com", "atomist")
    val content = "".getBytes
    val mode = 100755
    val sfa = ByteArrayFileArtifact(pathName = s"${pathElements.mkString("/")}/$name", content = content)
    sfa.name shouldEqual name
    sfa.pathElements.seq shouldEqual pathElements
    sfa shouldEqual ByteArrayFileArtifact(name, pathElements, content, mode, None)
  }

  it should "not permit null path" in {
    an[IllegalArgumentException] should be thrownBy ByteArrayFileArtifact(null, "contents".getBytes)
  }

  it should "not permit empty path" in {
    an[IllegalArgumentException] should be thrownBy ByteArrayFileArtifact("", "contents".getBytes)
  }

  it should "not permit path with only /" in {
    an[IllegalArgumentException] should be thrownBy ByteArrayFileArtifact("/", "contents".getBytes)
  }

  it should "not permit relative paths" in {
    an[IllegalArgumentException] should be thrownBy ByteArrayFileArtifact("./test.txt", "contents".getBytes)
  }

  it should "not permit paths starting with ../" in {
    an[IllegalArgumentException] should be thrownBy ByteArrayFileArtifact("../test.txt", "contents".getBytes)
  }

  it should "remove opening / from artifacts in root" in {
    val (name, contents) = ("name", "contents")
    val withoutSlash = ByteArrayFileArtifact(name, contents.getBytes)
    val withSlash = ByteArrayFileArtifact("/" + name, contents.getBytes)
    withSlash.path should equal(name)
    withSlash should equal(withoutSlash)
    withSlash.pathElements should be(empty)
    withoutSlash.pathElements should be(empty)
  }

  it should "remove opening / from artifacts in nested path" in {
    val (path, contents) = ("src/main/java/Hello.java", "contents")
    val withoutSlash = ByteArrayFileArtifact(path, contents.getBytes)
    val withSlash = ByteArrayFileArtifact("/" + path, contents.getBytes)
    withSlash.path should equal(path)
    withSlash should equal(withoutSlash)
    withSlash.pathElements.length should equal(3)
    withoutSlash.pathElements.length should equal(3)
  }

  it should "create ByteArrayFileArtifact and modify mode and uniqueId" in {
    val name = "filename"
    val pathElements = Seq("com", "atomist")
    val content = "hello world"
    val mode = 100755
    val uniqueId = Some("atomist")
    val sfa = ByteArrayFileArtifact(name, pathElements, content.getBytes, mode, uniqueId)
    sfa.content should equal(content)
    sfa.mode should equal(mode)
    sfa.uniqueId should equal(uniqueId)

    val sfa1 = sfa
      .withContent("Atomist")
      .withMode(FileArtifact.ExecutableMode)
      .withUniqueId("foobar")
    sfa1.content should equal("Atomist")
    sfa1.mode should equal(FileArtifact.ExecutableMode)
    sfa1.uniqueId.get should equal("foobar")
  }
}
atomist/artifact-source
src/test/scala/com/atomist/source/ByteArrayFileArtifactTest.scala
Scala
gpl-3.0
3,318
package io.youi.material.impl

import scala.scalajs.js

@js.native
trait MDCTextFieldImplementation extends js.Object {
  def getLabelAdapterMethods_(): MDCLabelAdapter
}
outr/youi
gui/src/main/scala/io/youi/material/impl/MDCTextFieldImplementation.scala
Scala
mit
171
/*
 * Copyright 2017 Ahmad Mozafarnia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ir.bama.services

import ir.bama.repositories.BaseRepo
import play.api.Logger

import scala.concurrent.{ExecutionContext, Future}

/**
  * @author ahmad
  */
abstract class BaseService[Entity, Repo <: BaseRepo[Entity]](val repo: Repo)(implicit ec: ExecutionContext) {

  protected val logger = Logger(getClass)

  import repo.dbConfig._
  import profile.api._

  def persist(entity: Entity): Future[Long] = db.run(repo.persist(entity))

  def load(id: Long): Future[Option[Entity]] = db.run(repo.load(id))

  def list(range: Option[Range]): Future[Seq[Entity]] = db.run(repo.list(range))
}
ahmadmo/bama-api-demo
app/ir/bama/services/BaseService.scala
Scala
apache-2.0
1,199
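Concrete services are then one-liners on top of `BaseService`; `Car` and `CarRepo` below are hypothetical names for illustration, not types from the codebase:

class CarService(repo: CarRepo)(implicit ec: ExecutionContext)
  extends BaseService[Car, CarRepo](repo)

// persist/load/list come for free:
//   carService.persist(car): Future[Long]
//   carService.load(42L):    Future[Option[Car]]
//   carService.list(None):   Future[Seq[Car]]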
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.scheduler

import org.apache.spark.annotation.DeveloperApi

/**
 * :: DeveloperApi ::
 * Information about an [[org.apache.spark.Accumulable]] modified during a task or stage.
 */
@DeveloperApi
class AccumulableInfo private[spark] (
    val id: Long,
    val name: String,
    val update: Option[String], // represents a partial update within a task
    val value: String,
    val internal: Boolean) {

  override def equals(other: Any): Boolean = other match {
    case acc: AccumulableInfo =>
      this.id == acc.id && this.name == acc.name &&
        this.update == acc.update && this.value == acc.value
    case _ => false
  }

  // equals without a matching hashCode breaks hash-based collections;
  // keep this consistent with equals(), which deliberately ignores `internal`.
  override def hashCode(): Int = {
    val state = Seq(id, name, update, value)
    state.map(_.hashCode).foldLeft(0)((a, b) => 31 * a + b)
  }
}

object AccumulableInfo {
  def apply(id: Long, name: String, update: Option[String], value: String): AccumulableInfo = {
    new AccumulableInfo(id, name, update, value, internal = false)
  }

  def apply(id: Long, name: String, value: String): AccumulableInfo = {
    new AccumulableInfo(id, name, None, value, internal = false)
  }
}
ArvinDevel/onlineAggregationOnSparkV2
core/src/main/scala/org/apache/spark/scheduler/AccumulableInfo.scala
Scala
apache-2.0
1,798
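The companion overloads above exist so callers can omit the rarely-set fields; note that `equals` compares id, name, update and value but ignores `internal`:

val withUpdate = AccumulableInfo(1L, "records read", Some("10"), "110")
val totalOnly  = AccumulableInfo(1L, "records read", "110")
assert(withUpdate != totalOnly) // differs in `update`; both are created with internal = false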
def f(implicit i: Int) = {}
implicit var v: Int = 1
println(/* offset: 4 */ f)
ilinum/intellij-scala
testdata/resolve2/function/implicit/VariableImplicit.scala
Scala
apache-2.0
80
package org.shapelogic.sc.imageprocessing

import org.shapelogic.sc.util.Constants.DOWN
import org.shapelogic.sc.util.Constants.LEFT
import org.shapelogic.sc.util.Constants.RIGHT
import org.shapelogic.sc.util.Constants.UP
import org.shapelogic.sc.polygon.CPointInt
import org.shapelogic.sc.polygon.Polygon
import org.shapelogic.sc.util.Constants
import org.shapelogic.sc.image.BufferImage
import spire.implicits._
import spire.math._
import scala.reflect.ClassTag
import org.shapelogic.sc.color.IColorDistanceWithImage
import org.shapelogic.sc.pixel.PixelDistance
import org.shapelogic.sc.numeric.PrimitiveNumberPromotersAux
import org.shapelogic.sc.numeric.NumberPromotion

/**
 * Edge Tracer. <br />
 *
 * The first version is based on Wand from ImageJ 1.38.<br />
 *
 * It traces with a 2 x 2 square that puts the top left pixels inside the
 * particle and the bottom right outside.<br />
 *
 * Might be replaced with a version that has all the pixels inside.<br />
 *
 * @author Sami Badawi
 *
 * XXX EdgeTracerColor used to be specialized but this caused null pointer
 * problems for image in:
 * PixelDistance.scala
 */
class EdgeTracerColor[ //
  T: ClassTag, //Input image type
  C: ClassTag: Numeric: Ordering //Calculation type
](
  val inputImage: BufferImage[T],
  maxDistance: C,
  similarIsMatch: Boolean)(
  implicit promoter: NumberPromotion.Aux[T, C])
  extends PixelFollow[T, C](
    inputImage,
    maxDistance.toInt,
    similarIsMatch)(
    implicitly[ClassTag[T]],
    implicitly[ClassTag[C]],
    implicitly[Numeric[C]],
    implicitly[Ordering[C]],
    promoter)
  with IEdgeTracer {

  val makeOutput = true

  /**
   * This will not be called
   */
  lazy val outputImage: BufferImage[T] = inputImage.empty()

  /**
   * This seems a little slow
   * Calculate goodness around center point and leave in array of boolean
   */
  def makeDirections(x: Int, y: Int, only4points: Boolean): Array[Boolean] = {
    var stepSize = 1
    if (only4points)
      stepSize = STEP_SIZE_FOR_4_DIRECTIONS
    cfor(0)(_ < Constants.DIRECTIONS_AROUND_POINT, _ + stepSize) { i =>
      _dirs(i) = matchInBounds(x + Constants.CYCLE_POINTS_X(i), y + Constants.CYCLE_POINTS_Y(i))
    }
    _dirs
  }

  def nextDirection(x: Int, y: Int, lastDirection: Int, clockwise: Boolean): Int = {
    var directions: Array[Boolean] = makeDirections(x, y, true)
    val lastDirectionReleativeCurrent = lastDirection + Constants.DIRECTIONS_AROUND_POINT / 2
    val stepSize = STEP_SIZE_FOR_4_DIRECTIONS
    cfor(2)(_ <= Constants.DIRECTIONS_AROUND_POINT, _ + stepSize) { i =>
      var step = i
      if (!clockwise)
        step = Constants.DIRECTIONS_AROUND_POINT - i
      val real_direction = (lastDirectionReleativeCurrent + step) % Constants.DIRECTIONS_AROUND_POINT
      //Return first point that is inside
      if (directions(real_direction))
        return real_direction
    }
    -1 //Not found
  }

  def traceEdge(xstart: Int, ystart: Int, startingDirectionIn: Int): Polygon = { //XXX
    val polygon = new Polygon()
    polygon.startMultiLine()
    val chainCodeHandler = new ChainCodeHandler(polygon.getAnnotatedShape())
    chainCodeHandler.setup()
    chainCodeHandler.setMultiLine(polygon.getCurrentMultiLine())
    chainCodeHandler.setFirstPoint(new CPointInt(xstart, ystart))
    var x = xstart
    var y = ystart
    val startingDirection = BaseVectorizer.oppesiteDirection(
      nextDirection(x, y, startingDirectionIn - 2, false).toByte)
    var direction: Int = startingDirection
    var count = 0
    var stop = false
    do {
      count += 1
      if (makeOutput)
        copyPixel(x, y)
      direction = nextDirection(x, y, direction, clockwise = true)
      if (-1 == direction)
        stop = true
      direction match {
        case UP => {
          y = y - 1
        }
        case DOWN => {
          y = y + 1
        }
        case LEFT => {
          x = x - 1
        }
        case RIGHT => {
          x = x + 1
        }
        case -1 => {
          stop = true
        }
      }
      if (verboseLogging)
        println(s"direction: $direction new x: $x, y: $y")
      if (maxLength < count) {
        println(s"EdgeTracer: count $count exceeded max length")
        throw new Exception(s"EdgeTracer: count $count exceeded max length")
        stop = true
      }
      //If the chain becomes too long just give up
      if (!chainCodeHandler.addChainCode(direction.toByte))
        stop = true
      //    } while ((x!=xstart || y!=ystart)) //Original clause causes termination problems
    } while (x != xstart || y != ystart || direction != startingDirection || stop)
    chainCodeHandler.getValue()
    polygon.setPerimeter(chainCodeHandler.getPerimeter())
    polygon.getValue()
    polygon.getBBox().add(chainCodeHandler._bBox)
    polygon
  }

  /**
   * Traces the boundary of an area of uniform color, where
   * 'startX' and 'startY' are somewhere inside the area.
   * A 16 entry lookup table is used to determine the
   * direction at each step of the tracing process.
   */
  def autoOutline(startX: Int, startY: Int): Polygon = {
    val topOption = findTop(startX, startY)
    topOption match {
      case Some((x, y)) => traceEdge(x, y, 2)
      case None => {
        println(s"Top point not found starting at x: $startX, y: $startY")
        null
      }
    }
  }
}

object EdgeTracerColor {

  def apply(
    inputImage: BufferImage[Byte],
    maxDistance: Double,
    similarIsMatch: Boolean): EdgeTracerColor[Byte, Int] = {
    val edgeTracer = new EdgeTracerColor[Byte, Int](inputImage, maxDistance.toInt, similarIsMatch)(
      implicitly[ClassTag[Byte]],
      implicitly[ClassTag[Int]],
      implicitly[Numeric[Int]],
      implicitly[Ordering[Int]],
      PrimitiveNumberPromotersAux.BytePromotion)
    edgeTracer
  }

  def fromBufferImage(
    inputImage: BufferImage[Byte],
    referenceColor: Array[Byte],
    maxDistance: Double,
    similarIsMatch: Boolean): EdgeTracerColor[Byte, Int] = {
    val edgeTracer = apply(inputImage, maxDistance.toInt, similarIsMatch)
    edgeTracer.setReferencePointArray(referenceColor)
    edgeTracer
  }

  def fromBufferImageAndPoint(
    inputImage: BufferImage[Byte],
    x: Int,
    y: Int,
    maxDistance: Double = 10): EdgeTracerColor[Byte, Int] = {
    val edgeTracer = apply(inputImage, maxDistance.toInt, similarIsMatch = true)
    edgeTracer.takeColorFromPoint(x, y)
    edgeTracer
  }

  def makeByteTransform(inputImage: BufferImage[Byte], parameter: String): BufferImage[Byte] = {
    var x = inputImage.width / 2
    var y = inputImage.height / 2
    var distance = 10
    try {
      val numbers = parameter.split(',').map(_.trim().toInt)
      x = numbers(0)
      y = numbers(1)
      distance = numbers(2)
    } catch {
      case ex: Throwable => println(s"Could not parse input: $parameter, should have format x,y,distance")
    }
    val edgeTracerColor = fromBufferImageAndPoint(inputImage, x, y, distance)
    val polygon = edgeTracerColor.autoOutline(x, y)
    println(s"polygon: $polygon")
    edgeTracerColor.outputImage
  }
}
sami-badawi/shapelogic-scala
src/main/scala/org/shapelogic/sc/imageprocessing/EdgeTracerColor.scala
Scala
mit
7,090
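Following `makeByteTransform` above, the intended entry point is the companion object; this usage sketch assumes an already-loaded `image: BufferImage[Byte]`:

// Pick the reference colour at (40, 30), then trace the edge of the matching region.
val tracer = EdgeTracerColor.fromBufferImageAndPoint(image, x = 40, y = 30, maxDistance = 10)
val outline = tracer.autoOutline(40, 30) // Polygon outlining the region, or null if no top point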
/* Copyright 2009-2016 EPFL, Lausanne */

package leon.solvers

class CantResetException(s: Solver) extends Exception(s"Unable to reset solver $s")
epfl-lara/leon
src/main/scala/leon/solvers/CantResetException.scala
Scala
gpl-3.0
148
package controllers

import play.api.mvc._

object Application extends Controller {

  def index = TODO

}
peoplepattern/LeVar
levar-web/app/controllers/Application.scala
Scala
apache-2.0
105
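`TODO` serves Play's stock "not implemented yet" page; a minimal real implementation of the action would look like this (response text is illustrative only):

def index = Action {
  Ok("levar is up")
}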
package ch.descabato.utils

import java.io._
import java.nio.ByteBuffer
import java.nio.file.Files
import java.security.MessageDigest
import java.text.DecimalFormat
import java.util
import javax.xml.bind.DatatypeConverter

import ch.descabato.CustomByteArrayOutputStream
import com.typesafe.scalalogging.{LazyLogging, Logger}
import org.bouncycastle.crypto.Digest

import scala.collection.mutable
import scala.language.implicitConversions

trait RealEquality[T] {
  def ===(t: T): Boolean

  def !==(t: T): Boolean = !(this === t)
}

object Hash {
  val empty: Hash = Hash(Array.ofDim(0))

  def fromBase64(hash: String): Hash = {
    Hash(Utils.decodeBase64Url(hash))
  }

  def isDefined(hash: Hash): Boolean = {
    hash.bytes != null && hash.length > 0
  }

  def apply(hash: Array[Byte]) = {
    new Hash(hash)
  }
}

class Hash private (val bytes: Array[Byte]) extends AnyVal {
  def length: Int = bytes.length

  def base64: String = Utils.encodeBase64Url(bytes)

  def ===(other: Hash): Boolean = java.util.Arrays.equals(bytes, other.bytes)

  def !==(other: Hash): Boolean = !(this === other)

  def wrap(): BytesWrapper = BytesWrapper(bytes)

  def hashContent(): Int = util.Arrays.hashCode(bytes)
}

object BytesWrapper {
  def apply(bytes: Array[Byte], offset: Int = 0, length: Int = -1): BytesWrapper = {
    require(bytes != null)
    require(length <= bytes.length || length < 0)
    val correctLength = if (length < 0) bytes.length - offset else length
    new BytesWrapper(bytes, offset, correctLength)
  }
}

class BytesWrapper private (val array: Array[Byte], val offset: Int, val length: Int) {

  def asInputStream() = new ByteArrayInputStream(array, offset, length)

  def apply(i: Int) = array(i + offset)

  def asArray(): Array[Byte] = {
    if (array == null) {
      Array.empty[Byte]
    } else {
      if (offset == 0 && length == array.length) {
        array
      } else {
        val out = Array.ofDim[Byte](length)
        System.arraycopy(array, offset, out, 0, length)
        out
      }
    }
  }

  def equals(other: BytesWrapper): Boolean = {
    if (this.length != other.length) {
      return false
    }
    var o1 = this.offset
    var o2 = other.offset
    val end1 = this.offset + this.length
    while (o1 < end1) {
      if (array(o1) != other.array(o2)) {
        return false
      }
      o1 += 1
      o2 += 1
    }
    true
  }

  override def equals(obj: Any): Boolean = obj match {
    case other: BytesWrapper => equals(other)
    case _ => false
  }

  override def hashCode: Int = {
    if (array == null) return 0
    var result: Int = 1
    var i = offset
    val end = offset + length
    while (i < end) {
      result = 31 * result + array(i)
      i += 1
    }
    result
  }

  override def toString(): String = array.length + ": " + new String(array)

  def toByteBuffer(): ByteBuffer = ByteBuffer.wrap(array, offset, length)
}

object Utils extends LazyLogging {

  private val units = Array[String]("B", "KB", "MB", "GB", "TB")

  def isWindows: Boolean = System.getProperty("os.name").contains("indows")

  def readableFileSize(size: Long, afterDot: Int = 1): String = {
    if (size <= 0) return "0"
    val digitGroups = (Math.log10(size) / Math.log10(1024)).toInt
    val afterDotPart = if (afterDot == 0) "#" else "0" * afterDot
    new DecimalFormat("#,##0." + afterDotPart).format(size / Math.pow(1024, digitGroups)) + Utils.units(digitGroups)
  }

  def encodeBase64(bytes: Array[Byte]) = DatatypeConverter.printBase64Binary(bytes)
  def decodeBase64(s: String) = DatatypeConverter.parseBase64Binary(s)

  def encodeBase64Url(bytes: Array[Byte]): String = encodeBase64(bytes).replace('+', '-').replace('/', '_')
  def decodeBase64Url(s: String): Array[Byte] = decodeBase64(s.replace('-', '+').replace('_', '/'))

  def normalizePath(x: String): String = x.replace('\\', '/')

  def logException(t: Throwable) {
    val baos = new CustomByteArrayOutputStream()
    val ps = new PrintStream(baos)
    def print(t: Throwable) {
      t.printStackTrace(ps)
      if (t.getCause() != null) {
        ps.println()
        ps.println("Caused by: ")
        print(t.getCause())
      }
    }
    print(t)
    logger.debug(baos.toString())
  }
}

object Implicits {

  import scala.language.higherKinds

  implicit def hashToWrapper(a: Hash): BytesWrapper = BytesWrapper(a.bytes)

  implicit def hashToArray(a: Hash): Array[Byte] = a.bytes

  implicit class AwareMessageDigest(md: MessageDigest) {
    def update(bytesWrapper: BytesWrapper): Unit = {
      md.update(bytesWrapper.array, bytesWrapper.offset, bytesWrapper.length)
    }

    def digest(bytesWrapper: BytesWrapper): Hash = {
      update(bytesWrapper)
      digest()
    }

    def digest(): Hash = {
      Hash(md.digest())
    }
  }

  implicit class AwareDigest(md: Digest) {
    def update(bytesWrapper: BytesWrapper): Unit = {
      md.update(bytesWrapper.array, bytesWrapper.offset, bytesWrapper.length)
    }

    def digest(bytesWrapper: BytesWrapper): Hash = {
      update(bytesWrapper)
      digest()
    }

    def digest(): Hash = {
      val bytes = Array.ofDim[Byte](md.getDigestSize)
      md.doFinal(bytes, 0)
      Hash(bytes)
    }
  }

  implicit class AwareOutputStream(os: OutputStream) {
    def write(bytesWrapper: BytesWrapper) {
      os.write(bytesWrapper.array, bytesWrapper.offset, bytesWrapper.length)
    }
  }

  implicit class ByteArrayUtils(buf: Array[Byte]) extends RealEquality[Array[Byte]] {
    def ===(other: Array[Byte]): Boolean = java.util.Arrays.equals(buf, other)

    def wrap(): BytesWrapper = BytesWrapper(buf)
  }

  implicit class InvariantContains[T, CC[X] <: Seq[X]](xs: CC[T]) {
    def safeContains(x: T): Boolean = xs contains x
  }

  implicit class InvariantContains2[T, CC[X] <: scala.collection.Set[X]](xs: CC[T]) {
    def safeContains(x: T): Boolean = xs contains x
  }

  implicit class InvariantContains3[T](xs: scala.collection.Map[T, _]) {
    def safeContains(x: T): Boolean = xs.keySet contains x
  }
}

object FileUtils extends Utils {

  def getRelativePath(dest: File, to: File, path: String): File = {
    // Needs to take common parts out of the path.
    // Different semantics on windows. Upper-/lowercase is ignored, ':' may not be part of the output
    def prepare(f: File) = {
      var path = if (Files.isSymbolicLink(f.toPath)) f.getAbsolutePath() else f.getCanonicalPath()
      if (Utils.isWindows)
        path = path.replaceAllLiterally("\\", "/")
      path.split("/").toList
    }
    val files = (prepare(to), prepare(new File(path)))

    def compare(s1: String, s2: String) = if (Utils.isWindows) s1.equalsIgnoreCase(s2) else s1 == s2

    def cutFirst(files: (List[String], List[String])): String = {
      files match {
        case (x :: xTail, y :: yTail) if compare(x, y) => cutFirst(xTail, yTail)
        case (_, x) => x.mkString("/")
      }
    }
    val cut = cutFirst(files)

    def cleaned(s: String) = if (Utils.isWindows) s.replaceAllLiterally(":", "_") else s

    new File(dest, cleaned(cut))
  }

  def deleteAll(f: File): Unit = {
    def walk(f: File) {
      if (f.isDirectory()) {
        f.listFiles().toList.foreach(walk)
        f.delete()
      } else {
        f.delete()
        Files.deleteIfExists(f.toPath())
      }
    }
    var i = 0
    do {
      walk(f)
      i += 1
      Thread.sleep(500)
    } while (i < 5 && f.exists)
    if (i > 1) {
      logger.warn(s"Took delete all $i runs, now folder is deleted " + (!f.exists))
    }
  }
}

trait Utils extends LazyLogging {
  lazy val l: Logger = logger

  def readableFileSize(size: Long): String = Utils.readableFileSize(size)

  def logException(t: Throwable) {
    Utils.logException(t)
  }
}

class ByteArrayMap[T] extends mutable.HashMap[Array[Byte], T] {
  override protected def elemHashCode(key: Array[Byte]): Int = {
    util.Arrays.hashCode(key)
  }

  override protected def elemEquals(key1: Array[Byte], key2: Array[Byte]): Boolean = {
    util.Arrays.equals(key1, key2)
  }
}

class FastHashMap[T] extends mutable.HashMap[Hash, T] {
  override protected def elemHashCode(key: Hash): Int = {
    util.Arrays.hashCode(key.bytes)
  }

  override protected def elemEquals(key1: Hash, key2: Hash): Boolean = {
    util.Arrays.equals(key1.bytes, key2.bytes)
  }
}
Stivo/DeScaBaTo
core/src/main/scala/ch/descabato/utils/Utils.scala
Scala
gpl-3.0
8,307
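Expected behaviour of `readableFileSize` above (powers of 1024, one digit after the dot by default, unit appended without a space; the grouping separator assumes an English default locale):

Utils.readableFileSize(0)        // "0"
Utils.readableFileSize(1023)     // "1,023.0B"
Utils.readableFileSize(123456)   // "120.6KB"  (123456 / 1024 = 120.56...)
Utils.readableFileSize(5L << 30) // "5.0GB"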
package scalaz.stream.actor

import java.util.concurrent.atomic.AtomicBoolean

import scala._
import scala.annotation.tailrec
import scalaz._
import scalaz.concurrent.{Strategy, Actor, Task}
import scalaz.stream.Process._
import scalaz.stream.Step
import scalaz.stream.wye.{AwaitBoth, AwaitR, AwaitL}
import scalaz.stream.{Process, wye}

object WyeActor {

  trait WyeSide[A, L, R, O] {
    /** returns next wye after processing the result of the step **/
    def receive(r: \/[Throwable, Step[Task, A]])(y2: Wye[L, R, O]): Wye[L, R, O]
  }

  val Interrupted = new java.lang.Exception("Interrupted to clean") {
    override def fillInStackTrace(): Throwable = this
  }

  sealed trait Msg
  case class Ready[A, L, R, O](from: WyeSide[A, L, R, O], s: Throwable \/ Step[Task, A]) extends Msg
  case class Get[A](cb: (Throwable \/ Seq[A]) => Unit) extends Msg
  case class Done(rsn: Throwable, cb: (Throwable \/ Unit) => Unit) extends Msg

  trait WyeSideOps[A, L, R, O] extends WyeSide[A, L, R, O] {

    // Next step of process that feeds into wye.
    // - left contains cleanup in case the step is running to perform any cleanup needed
    // - right contains next step of the process
    var step: (Process[Task, A] \/ Step[Task, A])

    // when this is set to `true`
    // it indicates the running task is to be interrupted and the cleanup process will start as the next step
    // please note there is still a slight chance that `cleanup` and the last step will run in parallel.
    // to solve this, we need a fix or resolution to https://github.com/scalaz/scalaz/issues/599.
    private val cleanup: AtomicBoolean = new AtomicBoolean(false)

    def feedA(as: Seq[A])(y2: Wye[L, R, O]): Wye[L, R, O]
    def haltA(e: Throwable)(y2: Wye[L, R, O]): Wye[L, R, O]

    //feeds the wye by element or signals halt to wye, producing next state of process
    def receive(r: Throwable \/ Step[Task, A])(y2: Wye[L, R, O]): Wye[L, R, O] = {
      r match {
        case \/-(s) =>
          step = \/-(s)
          s match {
            case Step(\/-(h), Halt(e), c) => haltA(e)(feedA(h)(y2))
            case Step(\/-(h), t, c)       => feedA(h)(y2)
            case Step(-\/(e), t, c)       => haltA(e)(y2)
          }
        case -\/(e) =>
          step = \/-(Step.failed(e))
          haltA(e)(y2)
      }
    }

    def isClean: Boolean = step.toOption.exists { case s if s.isCleaned => true; case _ => false }
    def isHalt: Boolean = step.toOption.exists { case s if s.isHalted => true; case _ => false }
    def haltedBy: Option[Throwable] = step.toOption.collect {
      case Step(_, Halt(e), Halt(_)) => e
    }

    //returns true when the process is cleaned, or runs the cleanup and returns false
    //if the process is running, this is a no-op that returns false
    def runCleanup(a: Actor[Msg], e: Throwable): Boolean = step match {
      case \/-(s) if s.isCleaned => true
      case \/-(s)                => runClean(s.cleanup, e, a); false
      case -\/(c) if cleanup.get == false =>
        a ! Ready(this, \/-(Step.failed(Interrupted))) //this will have to be removed once Task returns an error when interrupted in scalaz.task, see the comment on the cleanup val above
        false
      case -\/(c) => false
    }

    def pull(a: Actor[Msg]): Boolean = step match {
      case \/-(s) if s.isCleaned => false
      case \/-(s) if s.isHalted  => runClean(s.cleanup, End, a); true
      case \/-(s)                => run(s.tail, a); true
      case -\/(c)                => false // request's task is in progress
    }

    private def runClean(c: Process[Task, A], e: Throwable, actor: Actor[Msg]): Unit = {
      cleanup.set(true)
      step = -\/(halt)
      c.causedBy(e).run.runAsync { cb => actor ! Ready(this, cb.map(_ => Step.failed(e))) }
    }

    private def run(s: Process[Task, A], actor: Actor[Msg]): Unit = {
      step = -\/(s.cleanup)
      s.runStep.runAsyncInterruptibly({ cb => actor ! Ready(this, cb) }, cleanup)
    }
  }

  /**
   * Actor that backs the `wye`. Actor is reading non-deterministically from both sides
   * and interprets wye to produce output stream.
   *
   * @param pl left process
   * @param pr right process
   * @param y  wye to control queueing and merging
   * @param S  strategy, preferably executor service
   * @tparam L Type of left process element
   * @tparam R Type of right process elements
   * @tparam O Output type of resulting process
   * @return Process with merged elements.
   */
  def wyeActor[L, R, O](pl: Process[Task, L], pr: Process[Task, R])(y: Wye[L, R, O])(S: Strategy): Process[Task, O] = {

    //current state of the wye
    var yy: Wye[L, R, O] = y

    //cb to be completed for `out` side
    var out: Option[(Throwable \/ Seq[O]) => Unit] = None

    //forward referenced actor
    var a: Actor[Msg] = null

    //Bias for reading from either left or right.
    var leftBias: Boolean = true

    case class LeftWyeSide(var step: (Process[Task, L] \/ Step[Task, L])) extends WyeSideOps[L, L, R, O] {
      def feedA(as: Seq[L])(y2: Process.Wye[L, R, O]): Process.Wye[L, R, O] = wye.feedL(as)(y2)
      def haltA(e: Throwable)(y2: Process.Wye[L, R, O]): Process.Wye[L, R, O] = wye.haltL(e)(y2)
      override def toString: String = "Left"
    }

    case class RightWyeSide(var step: (Process[Task, R] \/ Step[Task, R])) extends WyeSideOps[R, L, R, O] {
      def feedA(as: Seq[R])(y2: Process.Wye[L, R, O]): Process.Wye[L, R, O] = wye.feedR(as)(y2)
      def haltA(e: Throwable)(y2: Process.Wye[L, R, O]): Process.Wye[L, R, O] = wye.haltR(e)(y2)
      override def toString: String = "Right"
    }

    val L: LeftWyeSide = LeftWyeSide(\/-(Step.fromProcess(pl)))
    val R: RightWyeSide = RightWyeSide(\/-(Step.fromProcess(pr)))

    //switches right and left to cleanup (if not yet switched) and runs the cleanup
    def tryCleanup(e: Throwable): Boolean = {
      val l = L.runCleanup(a, e) && L.isClean
      val r = R.runCleanup(a, e) && R.isClean
      l && r
    }

    def completeOut(cb: (Throwable \/ Seq[O]) => Unit, r: Throwable \/ Seq[O]): Unit = {
      out = None
      S(cb(r))
    }

    @tailrec
    def tryCompleteOut(cb: (Throwable \/ Seq[O]) => Unit, y2: Wye[L, R, O]): Wye[L, R, O] = {
      y2.unemit match {
        case (h, ny) if h.nonEmpty =>
          completeOut(cb, \/-(h))
          ny

        case (_, ny@Halt(e)) =>
          if (tryCleanup(e)) completeOut(cb, -\/(e))
          ny

        case (_, ny@AwaitL(_, _, _)) => L.haltedBy match {
          case Some(e) => tryCompleteOut(cb, ny.killBy(e))
          case None    => L.pull(a); ny
        }

        case (_, ny@AwaitR(_, _, _)) => R.haltedBy match {
          case Some(e) => tryCompleteOut(cb, ny.killBy(e))
          case None    => R.pull(a); ny
        }

        case (_, ny@AwaitBoth(_, _, _)) =>
          if (L.isHalt && R.isHalt) {
            tryCompleteOut(cb, ny.killBy(L.haltedBy.get))
          } else {
            if (leftBias) { L.pull(a); R.pull(a) }
            else { R.pull(a); L.pull(a) }
            ny
          }

        case (_, Emit(_, _)) =>
          val e = new Exception("Impossible: Emit after unemit?")
          completeOut(cb, -\/(e))
          Halt(e)
      }
    }

    a = Actor.actor[Msg]({
      case Ready(side: WyeSide[Any, L, R, O]@unchecked, stepr) =>
        leftBias = side == R
        val ny = side.receive(stepr)(yy)
        yy = out match {
          case Some(cb) => tryCompleteOut(cb, ny)
          case None     => ny
        }

      case get: Get[O@unchecked] =>
        out = Some(get.cb)
        yy = tryCompleteOut(get.cb, yy)

      case Done(rsn, cb) =>
        val cbOut = cb compose ((_: Throwable \/ Seq[O]) => \/-(()))
        out = Some(cbOut)
        yy = tryCompleteOut(cbOut, yy.killBy(rsn))
    })(S)

    repeatEval(Task.async[Seq[O]](cb => a ! Get(cb))).flatMap(emitSeq(_)) onComplete
      suspend(eval(Task.async[Unit](cb => a ! Done(End, cb)))).drain
  }
}
aindlq/scalaz-stream
src/main/scala/scalaz/stream/actor/WyeActor.scala
Scala
mit
7,862
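A usage sketch for `wyeActor`, assuming the 0.4-era scalaz-stream API this file is written against (`emitSeq`, `wye.merge` and `Strategy.DefaultStrategy` all exist there); this is the machinery behind `p1 merge p2`:

import scalaz.concurrent.{Strategy, Task}
import scalaz.stream.{Process, wye}

// Non-deterministically interleave two finite sources into one stream.
val merged: Process[Task, Int] =
  WyeActor.wyeActor(Process.emitSeq(0 until 10), Process.emitSeq(100 until 110))(
    wye.merge)(Strategy.DefaultStrategy)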
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
   This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
   http://factorie.cs.umass.edu, http://github.com/factorie
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

package cc.factorie.app.nlp.load

import cc.factorie.app.nlp.{Document, Sentence, Token, UnknownDocumentAnnotator}

import scala.io.Source
import cc.factorie.variable._
import cc.factorie.app.nlp.pos.PennPosTag

import scala.Predef._

/**
 * @author John Sullivan
 *
 * Loads shallow parsing/chunking data from Conll 2000 shared task.
 * Each sentence becomes a document.
 *
 * 1 token type
 * 2 gold POS Tag
 * 3 gold chunk (BIO notation default)
 */
object LoadConll2000 extends Load {

  //Default BIO encoding for loadConll2000 from Source since this is the standard encoding for conll2000 training data
  def fromSource(source: Source) = fromSource(source, "BIO")

  def fromSource(source: Source, encoding: String): Seq[Document] = {
    val doc = new Document()
    doc.annotators(classOf[Token]) = UnknownDocumentAnnotator.getClass
    doc.annotators(classOf[Sentence]) = UnknownDocumentAnnotator.getClass
    doc.annotators(classOf[PennPosTag]) = UnknownDocumentAnnotator.getClass
    doc.annotators(classOf[BIOChunkTag]) = UnknownDocumentAnnotator.getClass

    //Enable multiple input encodings
    val newChunkLabel = encoding match {
      case "BILOU" => (t: Token, s: String) => new BILOUChunkTag(t, s)
      case "BIO" => (t: Token, s: String) => new BIOChunkTag(t, s)
      case "NESTED" => (t: Token, s: String) => new BILOUNestedChunkTag(t, s)
      case _ => (t: Token, s: String) => new BIOChunkTag(t, s)
    }

    var sent = new Sentence(doc)
    source.getLines().foreach { line =>
      sent = processWordLine(doc, sent, line, newChunkLabel)
    }
    Seq(doc)
  }

  val lineSplit = """([^\s]+) ([^\s]+) ([^\s]+)""".r
  val posTranslations = Map("(" -> "-LRB-", ")" -> "-RRB-")

  private def processWordLine(doc: Document, sent: Sentence, line: String, newChunkLabel: (Token, String) => ChunkTag): Sentence = line match {
    case lineSplit(tokenType, posTagString, chunkTagString) => {
      val t = new Token(sent, tokenType + " ")
      t.attr += new PennPosTag(t, posTranslations.getOrElse(posTagString, identity(posTagString)))
      t.attr += newChunkLabel(t, chunkTagString)
      sent
    }
    case empty if empty.isEmpty => new Sentence(doc)
    case otw => throw new Exception("Expected either a line with token pos tag chunk tag, or an empty line, received: %s".format(otw))
  }

  def convertBIOtoBILOU(sentences: Seq[Sentence]) {
    for (sentence <- sentences) {
      for (token <- sentence.tokens) {
        var prev: Token = null
        var next: Token = null
        if (token.sentenceHasPrev) prev = token.sentencePrev
        if (token.sentenceHasNext) next = token.sentenceNext
        token.sentenceNext
        val newLabel: String = BIOtoBILOU(prev, token, next)
        token.attr += new BILOUChunkTag(token, newLabel)
      }
    }
  }

  def BIOtoBILOU(prev: Token, token: Token, next: Token): String = {
    if (token.attr[BIOChunkTag].categoryValue == "O") return "O"
    val ts = token.attr[BIOChunkTag].categoryValue.split("-")
    var ps: Array[String] = null
    var ns: Array[String] = null
    if (next != null)
      ns = splitLabel(next)
    if (prev != null)
      ps = splitLabel(prev)

    if (token.attr[BIOChunkTag].categoryValue.contains("B-")) {
      if (next == null || ns(1) != ts(1) || ns(0) == "B")
        return "U-" + ts(1)
      else
        return token.attr[BIOChunkTag].categoryValue
    }

    if (next == null || ns(1) != ts(1) || ns(0) == "B")
      return "L-" + ts(1)
    "I-" + ts(1)
  }

  private def splitLabel(token: Token): Array[String] = {
    if (token.attr[BIOChunkTag].categoryValue.contains("-"))
      token.attr[BIOChunkTag].categoryValue.split("-")
    else
      Array("", "O")
  }
}

//Standard conll2000 Chunk Tags
object BIOChunkDomain extends CategoricalDomain[String] {
  this ++= Vector("B-ADJP", "B-ADVP", "B-CONJP", "B-INTJ", "B-LST", "B-NP",
    "B-PP", "B-PRT", "B-SBAR", "B-UCP", "B-VP",
    "I-ADJP", "I-ADVP", "I-CONJP", "I-INTJ", "I-LST", "I-NP",
    "I-PP", "I-PRT", "I-SBAR", "I-UCP", "I-VP",
    "O")
  freeze()
}

object BILOUChunkDomain extends CategoricalDomain[String] {
  this ++= BIOChunkDomain.categories
  this ++= Vector("L-ADVP", "L-ADJP", "L-CONJP", "L-INTJ", "L-LST", "L-NP",
    "L-PP", "L-PRT", "L-SBAR", "L-UCP", "L-VP",
    "U-ADJP", "U-ADVP", "U-CONJP", "U-INTJ", "U-LST", "U-NP",
    "U-PP", "U-PRT", "U-SBAR", "U-UCP", "U-VP")
  freeze()
}

//For Noun Phrase Chunk Tagging
//Requires custom training data tagged in this notation
object BILOUNestedChunkDomain extends CategoricalDomain[String] {
  this ++= Vector("B-NP:B-NP", "B-NP:I-NP", "B-NP:L-NP", "B-NP:U-NP", "B-NP:O",
    "I-NP:B-NP", "I-NP:I-NP", "I-NP:L-NP", "I-NP:U-NP", "I-NP:O",
    "L-NP:B-NP", "L-NP:I-NP", "L-NP:L-NP", "L-NP:U-NP", "L-NP:O",
    "U-NP:B-NP", "U-NP:I-NP", "U-NP:L-NP", "U-NP:U-NP", "U-NP:O",
    "O:B-NP", "O:I-NP", "O:L-NP", "O:U-NP", "O:O")
  freeze()
}

//This could be combined into a single LabeledCategoricalVariable with a settable domain
abstract class ChunkTag(val token: Token, tagValue: String) extends LabeledCategoricalVariable(tagValue)

class BIOChunkTag(token: Token, tagValue: String) extends ChunkTag(token, tagValue) {
  def domain = BIOChunkDomain
}

class BILOUChunkTag(token: Token, tagValue: String) extends ChunkTag(token, tagValue) {
  def domain = BILOUChunkDomain
}

class BILOUNestedChunkTag(token: Token, tagValue: String) extends ChunkTag(token, tagValue) {
  def domain = BILOUNestedChunkDomain
}
zxsted/factorie
src/main/scala/cc/factorie/app/nlp/load/LoadConll2000.scala
Scala
apache-2.0
6,384
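`processWordLine` above fixes the expected input shape: one "token POS chunk" triple per line, with a blank line between sentences. A tiny end-to-end load:

import scala.io.Source

val sample = "He PRP B-NP\nreckons VBZ B-VP\n. . O\n"
val docs = LoadConll2000.fromSource(Source.fromString(sample)) // one Document with BIO chunk tags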
package au.com.dius.pact.matchers.util

import scala.collection.JavaConversions

object CollectionUtils {

  def toOptionalList(list: java.util.List[String]): Option[List[String]] = {
    if (list == null) {
      None
    } else {
      Some(JavaConversions.collectionAsScalaIterable(list).toList)
    }
  }
}
algra/pact-jvm
pact-jvm-matchers/src/main/scala/au/com/dius/pact/matchers/util/CollectionUtils.scala
Scala
apache-2.0
311
package breeze.linalg

import scala.reflect.ClassTag

class SliceMatrix[@specialized(Int) K1,
                  @specialized(Int) K2,
                  @specialized(Int, Double, Float) V](val tensor: QuasiTensor[(K1, K2), V],
                                                      val slice1: IndexedSeq[K1],
                                                      val slice2: IndexedSeq[K2]) extends Matrix[V] {

  def apply(i: Int, j: Int): V = tensor(slice1(i) -> slice2(j))

  def update(i: Int, j: Int, e: V) { tensor(slice1(i) -> slice2(j)) = e }

  def rows: Int = slice1.length

  def cols: Int = slice2.length

  def activeValuesIterator: Iterator[V] = valuesIterator

  def activeIterator: Iterator[((Int, Int), V)] = iterator

  def activeKeysIterator: Iterator[(Int, Int)] = keysIterator

  def activeSize: Int = size

  def repr: Matrix[V] = this

  def copy: Matrix[V] = {
    if (rows == 0) Matrix.zeroRows[V](cols)
    else if (cols == 0) Matrix.zeroCols[V](rows)
    else {
      val v = apply(0, 0)
      implicit val man = ClassTag[V](v.getClass)
      val result = new DenseMatrix[V](rows, cols, new Array[V](size))
      result := (this: Matrix[V])
      result
    }
  }
}
ktakagaki/breeze
src/main/scala/breeze/linalg/SliceMatrix.scala
Scala
apache-2.0
1,128
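A minimal behavioral sketch, assuming breeze's DenseMatrix (a QuasiTensor keyed by (Int, Int)) as the backing tensor:

import breeze.linalg.{DenseMatrix, SliceMatrix}

val dm = DenseMatrix((1, 2, 3), (4, 5, 6), (7, 8, 9))
// A 2x2 window over rows 0 and 2 and columns 1 and 2; no data is copied.
val view = new SliceMatrix(dm, IndexedSeq(0, 2), IndexedSeq(1, 2))
view(0, 0)               // 2, i.e. dm(0, 1)
view(1, 1) = 42          // writes through to dm(2, 2)
val snapshot = view.copy // materializes an independent 2x2 DenseMatrix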
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.rules.logical import org.apache.flink.api.scala._ import org.apache.flink.table.api._ import org.apache.flink.table.planner.plan.optimize.program.{BatchOptimizeContext, FlinkChainedProgram, FlinkHepRuleSetProgramBuilder, HEP_RULES_EXECUTION_TYPE} import org.apache.flink.table.planner.utils.TableTestBase import org.apache.calcite.plan.hep.HepMatchOrder import org.apache.calcite.rel.rules.{CoreRules, PruneEmptyRules} import org.apache.calcite.tools.RuleSets import org.junit.{Before, Test} /** * Test for [[FlinkPruneEmptyRules]]. */ class FlinkPruneEmptyRulesTest extends TableTestBase { private val util = batchTestUtil() @Before def setup(): Unit = { val programs = new FlinkChainedProgram[BatchOptimizeContext]() programs.addLast( "rules", FlinkHepRuleSetProgramBuilder.newBuilder .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE) .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) .add(RuleSets.ofList( FlinkSubQueryRemoveRule.FILTER, CoreRules.FILTER_REDUCE_EXPRESSIONS, CoreRules.PROJECT_REDUCE_EXPRESSIONS, PruneEmptyRules.FILTER_INSTANCE, PruneEmptyRules.PROJECT_INSTANCE, FlinkPruneEmptyRules.JOIN_RIGHT_INSTANCE)) .build() ) util.replaceBatchProgram(programs) util.addTableSource[(Int, Long, String)]("T1", 'a, 'b, 'c) util.addTableSource[(Int, Long, String)]("T2", 'd, 'e, 'f) } @Test def testSemiJoinRightIsEmpty(): Unit = { util.verifyRelPlan("SELECT * FROM T1 WHERE a IN (SELECT d FROM T2 WHERE 1=0)") } @Test def testAntiJoinRightIsEmpty(): Unit = { util.verifyRelPlan("SELECT * FROM T1 WHERE a NOT IN (SELECT d FROM T2 WHERE 1=0)") } }
apache/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkPruneEmptyRulesTest.scala
Scala
apache-2.0
2,583
/** * ************************************************************************** * * * (C) Copyright 2014 by Peter L Jones * * [email protected] * * * * This file is part of jTrapKATEditor. * * * * jTrapKATEditor is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 3 of the License, or * * (at your option) any later version. * * * * jTrapKATEditor is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with jTrapKATEditor. If not, see http://www.gnu.org/licenses/ * * * * ************************************************************************** */ package info.drealm.scala import scala.swing._ import info.drealm.scala.migPanel._ import info.drealm.scala.eventX._ import info.drealm.scala.layout._ import info.drealm.scala.{ Localization => L } object pnPads extends MigPanel("insets 0, gapx 2", "[grow][grow][grow][grow][grow][grow][grow][grow]", "[][][][]") { name = "pnPads" (for { row <- (0 to 3) zip Seq( List(0, 0, 18, 19, 20, 21, 0, 0), List(0, 17, 6, 7, 8, 9, 22, 0), List(16, 5, 1, 2, 3, 4, 10, 23), List(15, 0, 11, 12, 13, 14, 0, 24)); col <- (0 to 7) zip row._2; if col._2 != 0 } yield (s"cell ${col._1} ${row._1}", col._2)) foreach { pad => val pn = new Pad(pad._2) contents += (pn, pad._1 + ",grow") pn } peer.setFocusTraversalPolicy(new NameSeqOrderTraversalPolicy(this, ((1 to 24) flatMap { n => Seq(s"cbxPad${n}V3", s"cbxPad${n}V4") })) { override def getDefaultComponent(pn: java.awt.Container): java.awt.Component = jTrapKATEditor.currentPadNumber match { case x if x < 24 => { val _defaultCp = s"cbxPad${jTrapKATEditor.currentPadNumber + 1}${jTrapKATEditor.doV3V4V5("V3", "V4", "V4")}" if (containerValid(pn)) stepBy(getPeer(_defaultCp), _ + 1, true) else null } case _ => null } }) peer.setFocusTraversalPolicyProvider(true) }
pljones/jTrapKATEditor
src/main/scala/info/drealm/scala/pnPads.scala
Scala
gpl-3.0
3,119
package com.joescii.omniprop package providers import net.liftweb.util.Props object LiftPropsProvider extends PropertyProvider { def get(key:String) = Props.get(key) }
joescii/omniprop
src/main/scala/com/joescii/omniprop/providers/LiftPropsProvider.scala
Scala
apache-2.0
172
package sw.ds import org.apache.spark._ import org.apache.spark.sql.SQLContext import org.apache.spark.sql.functions._ import org.apache.spark.sql.expressions._ import sw.air._ object Datasets extends App { val sparkConf = new SparkConf() .setAppName(this.getClass.getName) .setMaster("local[*]") val sc = new SparkContext(sparkConf) val sqlCtx = new SQLContext(sc) import sqlCtx._ import sqlCtx.implicits._ val airports = Airport(sqlCtx) airports.as[Airport].filter(_.country == "Czech Republic").show() sc.stop() }
rabbitonweb/spark-workshop
src/main/scala/sw/ds/DataSets.scala
Scala
apache-2.0
552
package org.meritocracy.snippet import scala.xml._ import net.liftweb._ import common._ import http.{DispatchSnippet, S, SHtml, StatefulSnippet} import http.js.JsCmd import http.js.JsCmds._ import util._ import Helpers._ import net.liftmodules.mongoauth.model.ExtSession import org.meritocracy.model._ import org.meritocracy.lib._ object UserLogin extends Loggable { def render = { // form vars var password = "" var hasPassword = true var remember = User.loginCredentials.is.isRememberMe def doSubmit(): JsCmd = { S.param("email").map(e => { val email = e.toLowerCase.trim // save the email and remember entered in the session var User.loginCredentials(LoginCredentials(email, remember)) if (hasPassword && email.length > 0 && password.length > 0) { User.findByEmail(email) match { case Full(user) if (user.password.isMatch(password)) => logger.debug("pwd matched") User.logUserIn(user, true) if (remember) User.createExtSession(user.id.is) else ExtSession.deleteExtCookie() RedirectTo(Site.home.url) case _ => S.error("Invalid credentials") Noop } } else if (hasPassword && email.length <= 0 && password.length > 0) { S.error("id_email_err", "Please enter an email") Noop } else if (hasPassword && password.length <= 0 && email.length > 0) { S.error("id_password_err", "Please enter a password") Noop } else if (hasPassword) { S.error("id_email_err", "Please enter an email") S.error("id_password_err", "Please enter a password") Noop } else if (email.length > 0) { // see if email exists in the database User.findByEmail(email) match { case Full(user) => User.sendLoginToken(user) User.loginCredentials.remove() S.notice("An email has been sent to you with instructions for accessing your account") Noop case _ => RedirectTo(Site.register.url) } } else { S.error("id_email_err", "Please enter an email address") Noop } }) openOr { S.error("id_email_err", "Please enter an email address") Noop } } def cancel() = S.seeOther(Site.home.url); Noop "#id_email [value]" #> User.loginCredentials.is.email & "#id_password" #> SHtml.password(password, password = _) & "name=remember" #> SHtml.checkbox(remember, remember = _) & "#id_submit" #> SHtml.hidden(doSubmit) } } sealed trait UserSnippet extends AppHelpers with Loggable { protected def user: Box[User] protected def serve(snip: User => NodeSeq): NodeSeq = (for { u <- user ?~ "User not found" } yield { snip(u) }): NodeSeq protected def serve(html: NodeSeq)(snip: User => CssSel): NodeSeq = (for { u <- user ?~ "User not found" } yield { snip(u)(html) }): NodeSeq def header(xhtml: NodeSeq): NodeSeq = serve { user => <div id="user-header"> <h3>{name(xhtml)}</h3> </div> } def username(xhtml: NodeSeq): NodeSeq = serve { user => Text(user.username.is) } def name(xhtml: NodeSeq): NodeSeq = serve { user => if (user.name.is.length > 0) Text("%s (%s)".format(user.name.is, user.username.is)) else Text(user.username.is) } def title(xhtml: NodeSeq): NodeSeq = serve { user => <lift:head> <title lift="Menu.title">{"$name$: %*% - "+user.username.is}</title> </lift:head> } } object CurrentUser extends UserSnippet { protected def user = User.currentUser } object ProfileLocUser extends UserSnippet { protected def user = Site.profileLoc.currentValue import java.text.SimpleDateFormat val df = new SimpleDateFormat("MMM d, yyyy") def profile(html: NodeSeq): NodeSeq = serve(html) { user => val editLink: NodeSeq = if (User.currentUser.filter(_.id.is == user.id.is).isDefined) <a href={Site.editProfile.url} class="btn btn-info"><i class="icon-edit icon-white"></i> Edit Your Profile</a> else NodeSeq.Empty 
"#id_name *" #> <h3>{user.name.is}</h3> & "#id_location *" #> user.location.is & "#id_whencreated" #> df.format(user.whenCreated.toDate).toString & "#id_bio *" #> user.bio.is & "#id_editlink *" #> editLink } }
Rmanolis/meritocracy
src/main/scala/org/meritocracy/snippet/UserSnips.scala
Scala
apache-2.0
4,540
package redis.commands import redis.api.hyperloglog._ import redis.{ByteStringSerializer, Request} import scala.concurrent.Future trait HyperLogLog extends Request { def pfadd[V: ByteStringSerializer](key: String, values: V*): Future[Long] = send(Pfadd(key, values)) def pfcount(keys: String*): Future[Long] = send(Pfcount(keys)) def pfmerge(destKey: String, sourceKeys: String*): Future[Boolean] = send(Pfmerge(destKey, sourceKeys)) }
mspielberg/rediscala
src/main/scala/redis/commands/HyperLogLog.scala
Scala
apache-2.0
459
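A hedged usage sketch for these commands, assuming rediscala's RedisClient (which mixes in command traits like this one), an implicit ActorSystem, and a Redis server on localhost:

import akka.actor.ActorSystem
import redis.RedisClient

implicit val system = ActorSystem("hll-demo")
import system.dispatcher // ExecutionContext for mapping the futures

val client = RedisClient()
for {
  _     <- client.pfadd("visitors:today", "alice", "bob", "alice")
  count <- client.pfcount("visitors:today")   // ~2: HyperLogLog counts are approximate
  _     <- client.pfmerge("visitors:all", "visitors:today")
} yield count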
package iosr.worker import akka.actor.{ActorPath, ActorSystem, Props} import com.typesafe.config.ConfigFactory import iosr.worker.WorkerActor.Startup object WorkerApp extends App { val config = ConfigFactory.load() val system = ActorSystem("WorkerApp", config) val workerActor = system.actorOf(Props[WorkerActor]) val supervisorAddress = config.getString("supervisor.address") val supervisorPath = ActorPath.fromString(s"akka://$supervisorAddress/user/supervisorActor") workerActor ! Startup(supervisorPath) }
salceson/iosr-cloud-load-balancing
worker/src/main/scala/iosr/worker/WorkerApp.scala
Scala
mit
524
import java.net.{HttpURLConnection, URL} import java.util.concurrent.TimeoutException import sbt._ import sbt.Keys._ import sbt.Tests import scala.concurrent._ import scala.concurrent.duration._ import scala.util.control.NonFatal /** * This class is responsible for running integration tests. * @author Dmitriy Yefremov */ object IntegrationTestSettings { /** * Basic settings needed to enable integration testing of the project. */ private val itSettings = Defaults.itSettings ++ Seq ( libraryDependencies += "com.typesafe.play" %% "play-test" % play.core.PlayVersion.current % "it", unmanagedSourceDirectories in IntegrationTest <<= (baseDirectory in IntegrationTest)(base => Seq(base / "it")) ) /** * Settings need to enable integration testing with a real application instance running. */ val settings = itSettings ++ Seq( testOptions in IntegrationTest += Tests.Setup(() => setup()), testOptions in IntegrationTest += Tests.Cleanup(() => cleanup()) ) /** * HTTP port the application under test will be listening on. */ private val AppPort = 9000 /** * The URL to hit to check that the app is running. */ private val AppUrl = new URL(s"http://localhost:$AppPort") /** * Screen session name. It is only needed to kill the session by name later. */ private val ScreenName = "playFuncTest" /** * The command that runs the application. */ private val RunCommand = s"""screen -dmSL $ScreenName play run -Dhttp.port=$AppPort""" /** * The command that kills the running application. */ private val KillCommand = s"""screen -S $ScreenName -X quit""" /** * How long to wait for the application to start before failing. */ private val StartupTimeout = 60.seconds /** * How long to wait for the application to stop before failing. */ private val ShutdownTimeout = 10.seconds /** * Test initialization call back. This method should be called before running functional tests. */ private def setup(): Unit = { println("Launching the app...") println(RunCommand) RunCommand.run() // setup a shutdown hook to make sure the app is killed even if execution is interrupted sys.addShutdownHook(KillCommand.run()) // wait until the app is ready waitUntil(StartupTimeout) { println("Waiting for the app to start up...") isAppRunning() } println("The app is now ready") } /** * Test cleanup call back. This method should be called after running functional tests. */ private def cleanup(): Unit = { println("Killing the app...") println(KillCommand) KillCommand.run() waitUntil(ShutdownTimeout) { println("Waiting for the app to shutdown...") !isAppRunning() } } /** * Tests if the app is up and running. It also serves as a warm up call before running any tests (Play in dev mode will * not compile any classes before it receives the first request). */ private def isAppRunning(): Boolean = { try { val connection = AppUrl.openConnection().asInstanceOf[HttpURLConnection] connection.setRequestMethod("GET") connection.connect() true } catch { case NonFatal(e) => println(s"${e.getClass.getSimpleName}: ${e.getMessage}") false } } /** * Waits until either the given predicate returns `true` or the given timeout period is reached. * Throws a [[TimeoutException]] in case of a timeout. */ private def waitUntil(timeout: Duration)(predicate: => Boolean): Unit = { val startTimeMillis = System.currentTimeMillis while (!predicate) { Thread.sleep(5.seconds.toMillis) if ((System.currentTimeMillis - startTimeMillis).millis > timeout) { throw new TimeoutException } } } }
dmitriy-yefremov/play-functional-testing
project/IntegrationTestSettings.scala
Scala
apache-2.0
3,840
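A sketch of wiring these settings into a Play project's build (the project name is hypothetical):

// build.sbt
lazy val root = (project in file("."))
  .configs(IntegrationTest)
  .settings(IntegrationTestSettings.settings: _*)

// The app is then started before, and killed after, the suite:
//   sbt it:test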
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.model.spells

/**
 * Encapsulates spell range [[https://ddowiki.com/page/Spell#Range]]
 */
sealed trait SpellRange

/**
 * Personal range means the spell basically has no range; it is a self-only spell that you cannot
 * cast on anything other than yourself.
 */
trait Personal extends SpellRange

/**
 * Touch range is pretty much what it sounds like - you have to be almost touching your target for
 * the spell to function. Not quite literally, though: as of Update 9, the range for touch spells
 * was very slightly extended to be more forgiving. It's roughly 1.5 times the width of a regular
 * human character.
 */
trait Touch extends SpellRange

/**
 * Very short range refers to a standardized fixed distance of about 5 feet - roughly a third of
 * what most spells have. This short range is generally reserved for very low level spells which
 * are later replaced by stronger, longer-range and more damaging versions.
 */
trait VeryShort extends SpellRange

/**
 * Standard range refers to a standardized fixed distance. For AOE buff type spells such as bless
 * or haste, you can see the actual area affected by the spell's animation - about 15 feet. Pretty
 * much every AOE buff spell in the game has the same range as these two spells, but some do not
 * display a graphic, so use those as the guide. For offensive spells and targeted buff spells
 * with standard range, the actual range is about double a standard buff AOE, so roughly 30 feet.
 */
trait Standard extends SpellRange

/**
 * Double range refers to exactly twice the range that standard spells have. Pretty much every
 * ray-type spell in DDO has this range type. It works exactly like having the Enlarge feat on,
 * but it's a free benefit for the spell.
 */
trait Double extends SpellRange
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/spells/SpellRange.scala
Scala
apache-2.0
2,526
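Since these ranges are plain marker traits, client code can branch on them with type patterns. A hedged sketch (the distances merely restate the comments above, and `Double` here is the range trait, not scala.Double):

def approximateReachInFeet(range: SpellRange): Option[Int] = range match {
  case _: Personal  => Some(0)  // self only
  case _: Touch     => Some(2)  // ~1.5 character widths
  case _: VeryShort => Some(5)
  case _: Standard  => Some(30) // targeted spells; AOE buffs show ~15 ft
  case _: Double    => Some(60) // exactly twice standard
  case _            => None
}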
package org.crudible.core.binding.model import org.crudible.core.binding.FormInline import org.crudible.core.binding.traits.HasMax class InlineComponent extends HasMax { def form(form: FormInline): this.type = { this.decorateWith(form) } def form() = { this .getDecorator[FormInline] .getOrElse(throw new RuntimeException("This component requires a form to be set.")) } }
rehei/crudible
crudible-core/src/main/scala/org/crudible/core/binding/model/InlineComponent.scala
Scala
apache-2.0
404
/* * AriaProcessor.scala * * Copyright (c) 2014 Ronald Kurniawan. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301 USA */ package net.fluxo.dd import net.fluxo.dd.dbo.{Task, AriaProcess} import java.util import scala.util.control.Breaks._ import org.joda.time.DateTime import org.apache.log4j.Level import java.util.concurrent.TimeUnit import org.apache.commons.io.FilenameUtils import org.apache.commons.exec._ /** * AriaProcessor process commands that deal with "aria2c". It also monitors the currently running download. Whenever a * download process is stopped for any reason, this class also restarts the download. AriaProcessor also cleans up finished * downloads and moves the download results to local target directory. * * @author Ronald Kurniawan (viper) * @version 0.4.5, 15/03/14 */ class AriaProcessor { private val _activeProcesses: util.ArrayList[AriaProcess] = new util.ArrayList private val _startingProcesses: util.ArrayList[Int] = new util.ArrayList /** * Return a list of active <code>net.fluxo.dd.dbo.AriaProcess</code>. * * @return <code>java.util.ArrayList</code> object */ def ActiveProcesses: util.ArrayList[AriaProcess] = _activeProcesses /** * Process a download request from the client. Before the download starts, the process finds a free port to bind our * aria2 process. If no free port is found, the process is aborted. Otherwise a new aria2 process is started. * * @param uri the URL (torrent/HTTP) to download * @param owner user ID associated with this download * @param isHttp is this a HTTP download? 
* @param httpUsername username for HTTP authentication (supply an empty string if not required) * @param httpPassword password for HTTP authentication (supply an empty string if not required) * @return status string of the request; "OK" followed by download ID or error messages */ def processRequest(uri: String, owner: String, isHttp: Boolean, httpUsername: String, httpPassword: String): String = { if (!isHttp) { // the uri should always starts with "magnet:" or ends with ".torrent" if (!(uri startsWith "magnet:") && !(uri endsWith ".torrent")) { return "ERR URI" } } else { if (!(uri startsWith "http://") && !(uri startsWith "https://")) { return "ERR URI" } } // find a free port between starting rpc port to (starting rpc port + limit) var rpcPort = -1 breakable { for (x <- OUtils.readConfig.RPCPort until OUtils.readConfig.RPCPort + OUtils.readConfig.RPCLimit) { if (!(OUtils portInUse x)) { rpcPort = x break() } } } if (rpcPort < 0) return "All download slots taken, try again later" var newGid = OUtils generateGID() while (DbControl isTaskGIDUsed newGid) newGid = OUtils generateGID() val ariaThread = new AriaThread(rpcPort, uri, newGid, isHttp) if (httpUsername.length > 0 && httpPassword.length > 0) { ariaThread setCredentials(httpUsername, httpPassword) } new Thread(ariaThread) start() stat(rpcPort, restarting = false, newGid, owner, uri, isHttp = isHttp, httpUsername, httpPassword, ariaThread getExecutor) "OK " + newGid } /** * Kill the aria2 process that is bound to a specified port. * * @param port the port number where the aria2 process is allegedly bound to */ def killProcess(port: Int) { val iterator = _activeProcesses iterator() breakable { while (iterator.hasNext) { val e = iterator.next if ((e AriaPort) == port) { e killAriaProcess() break() } } } } /** * Attempt to restart failed downloads. */ def restartDownloads() { val activeTasks = DbControl queryUnfinishedTasks() if (activeTasks.length > 0) LogWriter writeLog("Trying to restart " + activeTasks.length + " unfinished downloads...", Level.INFO) var rpcPort = -1 for (t <- activeTasks) { LogWriter writeLog("Resuming download for " + t.TaskGID.orNull, Level.INFO) breakable { for (x <- OUtils.readConfig.RPCPort to OUtils.readConfig.RPCPort + OUtils.readConfig.RPCLimit) { if (!(OUtils portInUse x)) { rpcPort = x break() } } } if (rpcPort < 0) { LogWriter writeLog("All download slots taken, cannot restart downloads", Level.INFO) return } val ariaThread = new AriaThread(rpcPort, t.TaskInput.orNull, t.TaskGID.orNull, t.TaskIsHttp) if (t.TaskIsHttp) { if (t.TaskHttpUsername.getOrElse("").length > 0 && t.TaskHttpPassword.getOrElse("").length > 0) { ariaThread setCredentials(t.TaskHttpUsername.getOrElse(""), t.TaskHttpPassword.getOrElse("")) } } _startingProcesses add rpcPort new Thread(ariaThread) start() stat(rpcPort, restarting = true, t.TaskGID.getOrElse(""), t.TaskOwner.getOrElse(""), t.TaskInput.getOrElse(""), isHttp = t.TaskIsHttp, t.TaskHttpUsername.getOrElse(""), t.TaskHttpPassword.getOrElse(""), ariaThread getExecutor) } } /** * Attempt to collect the statistics of a newly started aria2 process by querying its RPC port. If the call is * successful, the method updates the database where clients can query the download progress. * @param port port number where aria2 process is bound to * @param restarting is this process a restart or a fresh download? * @param gid ID for the download * @param owner user ID associated with this download * @param uri URL to download * @param isHttp is this a HTTP download? 
* @param httpUsername username for HTTP authentication (supply an empty string if not required) * @param httpPassword password for HTTP authentication (supply an empty string if not required) * @param executor a <code>org.apache.commons.exec.DefaultExecutor</code> object */ def stat(port:Int, restarting: Boolean, gid: String, owner: String, uri: String, isHttp: Boolean, httpUsername: String, httpPassword: String, executor: DefaultExecutor) { // we sleep for 3s, to allow the newly started process to settle... try { Thread sleep 3000 } catch { case ie: InterruptedException => } if (!restarting) { // DEBUG LogWriter writeLog ("Adding new task to DB", Level.DEBUG) DbControl addTask new Task { TaskGID_=(gid) TaskInput_=(uri) TaskOwner_=(owner) TaskStarted_=(DateTime.now.getMillis) TaskIsHttp_=(isHttp) if (httpUsername.length > 0 && httpPassword.length > 0) { TaskHttpUsername_=(httpUsername) TaskHttpPassword_=(httpPassword) } } } // DEBUG LogWriter writeLog ("Adding new active task to list...", Level.DEBUG) ActiveProcesses add new AriaProcess { AriaPort_=(port) AriaProcess_:(executor) AriaTaskGid_=(gid) AriaTaskRestarting_=(restarting) AriaHttpDownload_=(isHttp) } // set all necessary parameters if this is an HTTP download... if (isHttp) { DbControl updateTaskTailGID(gid, gid) try { TimeUnit.SECONDS.sleep(5) } catch { case ie: InterruptedException => } val rpcClient = OUtils getXmlRpcClient port val active = OUtils sendAriaTellActive rpcClient if (active.length > 0) { for (o <- active) { val jMap = o.asInstanceOf[java.util.HashMap[String, Object]] val tailGID = (OUtils extractValueFromHashMap(jMap, "gid")).toString val task = { if (tailGID.length > 0) DbControl queryTaskTailGID tailGID else null } val cl = OUtils.extractValueFromHashMap(jMap, "completedLength").toString.toLong task.TaskCompletedLength_=(cl) val tl = OUtils.extractValueFromHashMap(jMap, "totalLength").toString.toLong task.TaskTotalLength_=(tl) task.TaskStatus_=(OUtils.extractValueFromHashMap(jMap, "status").toString) task.TaskInfoHash_=("noinfohash") // now we extract the 'PACKAGE' name, which basically is the name of the directory of the downloaded files... val objFiles = OUtils.extractValueFromHashMap(jMap, "files").asInstanceOf[Array[Object]] if (objFiles.length > 0) { val files = objFiles(0).asInstanceOf[java.util.HashMap[String, Object]] val path = OUtils.extractValueFromHashMap(files, "path").asInstanceOf[String] task.TaskPackage_=(FilenameUtils.getName(path)) } val progress = (task.TaskCompletedLength * 100)/task.TaskTotalLength LogWriter writeLog ("UPDATE: " + ((task TaskPackage) getOrElse "") + " --> " + progress + "%", Level.INFO) DbControl updateTask task } } } } /** * AriaThread processes a new download process by calling aria2 through <code>DefaultExecutor</code>. * @param port port number where aria2 process is bound to * @param uri URL to download * @param gid ID for the download * @param isHttp is this a HTTP download? * @see java.lang.Runnable */ class AriaThread(port: Int, uri: String, gid: String, isHttp: Boolean) extends Runnable { private var _httpUsername: Option[String] = None private var _httpPassword: Option[String] = None /** * Set the username and password for HTTP authentication. 
* * @param username username (supply an empty string if not required) * @param password password (supply an empty string if not required) */ def setCredentials(username: String, password: String) { _httpUsername = Some(username) _httpPassword = Some(password) } private var _executor: Option[DefaultExecutor] = None /** * Return the <code>DefaultExecutor</code> for this process. * * @return a <code>org.apache.commons.exec.DefaultExecutor</code> object */ def getExecutor: DefaultExecutor = { _executor.orNull } /** * Starts the download by constructing the command line first and then starts the <code>DefaultExecutor</code>. */ override def run() { OUtils createUriFile (gid, uri) // DEBUG LogWriter writeLog("AriaProcessor STARTING!", Level.DEBUG) val sb = new StringBuilder sb append "aria2c" append " --enable-rpc" append " --rpc-listen-port=" append port append " --gid=" append gid if (isHttp && (_httpUsername getOrElse "").length > 0 && (_httpPassword getOrElse "").length > 0) { sb append " --http-user=" append _httpUsername.getOrElse("") append " --http-passwd=" append _httpPassword.getOrElse("") } else if (!isHttp) { sb append " --seed-time=0" append " --max-overall-upload-limit=1" append " --follow-torrent=mem" append " --seed-ratio=1" } sb append " --input-file=" append "uridir/" append gid append ".txt" // DEBUG LogWriter writeLog("command line: " + sb.toString(), Level.DEBUG) val cmdLine = CommandLine parse sb.toString() val watchdog = new ExecuteWatchdog(ExecuteWatchdog INFINITE_TIMEOUT) val executor = new DefaultExecutor executor setWatchdog watchdog _executor = Some(executor) val pumpsh = new PumpStreamHandler(new OStream) executor setStreamHandler pumpsh executor execute cmdLine } } /** * Process the output result from <code>DefaultExecutor</code> into the log. */ class OStream() extends LogOutputStream { override def processLine(line: String, level: Int) { if (((line trim) length) > 0) { LogWriter writeLog("Aria Processor: " + line, Level.INFO) } } } } /** * A Singleton object of AriaProcessor. */ object OAria extends AriaProcessor
fluxodesign/DownloadDaemon
src/main/scala/net/fluxo/dd/AriaProcessor.scala
Scala
gpl-2.0
11,815
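A hedged call sketch against the OAria singleton above; the magnet URI and owner are made up:

OAria.processRequest("magnet:?xt=urn:btih:0123456789abcdef", owner = "alice",
  isHttp = false, httpUsername = "", httpPassword = "")
// => "OK <gid>" on success, "ERR URI" for an unsupported scheme, or a
//    "slots taken" message when no RPC port in the configured window is free.

OAria.restartDownloads() // re-attempts every unfinished task recorded in the DB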
package com.github.challenge class TestTester extends ProblemTester[AddProblem] { def makeDefaultProblems = { implicit val numGen = problem.NumGenerator.buildNumGenerator List( (AddProblem(1, 1), "2"), (AddProblem(2, 2), "4"), (AddProblem(1, 2), "3"), (AddProblem(123, 456), "579"), (AddProblem(5, 6), "11")) } override def makeExtraProblems = { implicit val numGen = problem.NumGenerator.buildNumGenerator Some(List( (AddProblem(1, 1), "2"), (AddProblem(2, 2), "4"), (AddProblem(1, 2), "3"), (AddProblem(123, 456), "579"), (AddProblem(5, 6), "11"))) } }
challenge-helper/challenge-helper
src/test/scala/com/github/challenge/TestTester.scala
Scala
apache-2.0
641
package scala.models.play import cats.implicits._ import io.apibuilder.generator.v0.models.{File, InvocationForm} import lib.generator.CodeGenerator object Play26Generator extends CodeGenerator { def prependHeader(contents: String, form: InvocationForm, formatHeader: scala.models.ApidocComments => String): String = s""" ${formatHeader(scala.models.ApidocComments(form.service.version, form.userAgent))} ${contents} """ def file(form: InvocationForm, suffix: String, contents: String, extension: Option[String]): File = generator.ServiceFileNames.toFile( form.service.namespace, form.service.organization.key, form.service.application.key, form.service.version, suffix, contents, extension ) def scalaFiles(form: InvocationForm): Either[Seq[String], List[File]] = List( ("Client", files.Client.contents(form)), ("MockClient", files.MockClient.contents(form)), ("Models", files.Models.contents(form)), ("ModelsBindables", files.ModelsBindables.contents(form)), ("ModelsBodyParsers", files.ModelsBodyParsers.contents(form)), ("ModelsGens", files.ModelsGens.contents(form)), ("ModelsJson", files.ModelsJson.contents(form)), ) .map { case (suffix, contents) => (suffix, prependHeader(contents, form, _.toJavaString)) } .traverse { case (suffix, contents) => utils.ScalaFormatter.format(contents).map((suffix, _)) } .map(_.map { case (suffix, contents) => file(form, suffix, contents, Some("scala")) }) .leftMap { t => Seq(t.toString) } def formatRoutes(contents: String): String = contents .trim .split("\\n") .map(_.trim) .mkString("\\n") def routesFile(form: InvocationForm): File = { val contents = files.Routes.contents(form) val headed = prependHeader(contents, form, _.forPlayRoutes) val formatted = formatRoutes(headed) File("routes", None, formatted) } override def invoke(form: InvocationForm): Either[Seq[String], Seq[File]] = for { scalaFiles <- this.scalaFiles(form) routesFile = this.routesFile(form) } yield scalaFiles :+ routesFile }
gheine/apidoc-generator
scala-generator/src/main/scala/models/play/Play26Generator.scala
Scala
mit
2,159
/* * VEGraph.scala * Induced graph for variable elimination. * * Created By: Avi Pfeffer ([email protected]) * Creation Date: Jan 1, 2009 * * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc. * See http://www.cra.com or email [email protected] for information. * * See http://www.github.com/p2t2/figaro for a copy of the software license. */ package com.cra.figaro.algorithm.factored import com.cra.figaro.algorithm._ import scala.collection.mutable.PriorityQueue /** * Abstract factors with no rows associated with variables. */ case class AbstractFactor(variables: List[Variable[_]]) /** * Information associated with a variable during variable elimination, including the factors to which it * belongs and variables with which it shares a factor. * * @param factors The abstract factors to which this variable belongs * @param neighbors The variables that share a factor in common with this variable. */ case class VariableInfo(factors: Set[AbstractFactor], neighbors: Set[Variable[_]]) /** * Induced graph for variable elimination. * * @param info A map from variables to information about the variables describing the * factors to which it belongs and its neighbors. */ class VEGraph private (val info: Map[Variable[_], VariableInfo]) { /** * Create the initial induced graph from a set of factors. */ def this(factors: Traversable[Factor[_]]) = this(VEGraph.makeInfo(factors)) /** * Returns the new graph after eliminating the given variable. This includes a factor involving all the * variables appearing in a factor with the eliminated variable, and excludes all factors in which the * eliminated variable appears. */ def eliminate(variable: Variable[_]): VEGraph = { val VariableInfo(oldFactors, allVars) = info(variable) val newFactor = AbstractFactor((allVars - variable).toList) var newInfo = VEGraph.makeInfo(info, List(newFactor), oldFactors) def removeNeighbor(neighbor: Variable[_]) = { val VariableInfo(oldNeighborFactors, oldNeighborNeighbors) = newInfo(neighbor) newInfo += neighbor -> VariableInfo(oldNeighborFactors, oldNeighborNeighbors - variable) } newInfo(variable).neighbors foreach (removeNeighbor(_)) (new VEGraph(newInfo)) } /** * Returns the elimination score, which is the increase in cost between the new factor involving the * variable and the existing factors (we want to minimize score). */ def score(variable: Variable[_]): Double = { val VariableInfo(oldFactors, allVars) = info(variable) val oldCost = VEGraph.cost(oldFactors) val newFactor = AbstractFactor((allVars - variable).toList) val newCost = VEGraph.cost(newFactor) newCost - oldCost //Experimental: what if we just consider the new cost? //newCost } } object VEGraph { /** * The cost of a factor is the number of entries in it, which is the product of the ranges of its variables. */ def cost(factor: AbstractFactor): Double = (1.0 /: factor.variables)(_ * _.size.toDouble) /** * The cost of a set of factors is the sum of the costs of the individual factors. 
*/ def cost(factors: Traversable[AbstractFactor]): Double = (0.0 /: factors)(_ + cost(_)) private def makeInfo(factors: Traversable[Factor[_]]): Map[Variable[_], VariableInfo] = makeInfo(Map(), factors map ((f: Factor[_]) => AbstractFactor(f.variables)), List()) private def makeInfo(initialInfo: Map[Variable[_], VariableInfo], factorsToAdd: Traversable[AbstractFactor], factorsToRemove: Traversable[AbstractFactor]): Map[Variable[_], VariableInfo] = { var info: Map[Variable[_], VariableInfo] = initialInfo def addFactorToVariable(factor: AbstractFactor, variable: Variable[_]): Unit = { val oldInfo = info.getOrElse(variable, VariableInfo(Set(), Set())) val newFactors = oldInfo.factors + factor val newNeighbors = oldInfo.neighbors ++ factor.variables info += variable -> VariableInfo(newFactors, newNeighbors) } def removeFactorFromVariable(factor: AbstractFactor, variable: Variable[_]) = { val oldInfo = info.getOrElse(variable, VariableInfo(Set(), Set())) val newFactors = oldInfo.factors - factor info += variable -> VariableInfo(newFactors, oldInfo.neighbors) } def addFactor(factor: AbstractFactor) = factor.variables foreach (addFactorToVariable(factor, _)) def removeFactor(factor: AbstractFactor) = factor.variables foreach (removeFactorFromVariable(factor, _)) factorsToAdd foreach (addFactor(_)) factorsToRemove foreach (removeFactor(_)) info } }
bruttenberg/figaro
Figaro/src/main/scala/com/cra/figaro/algorithm/factored/VEGraph.scala
Scala
bsd-3-clause
4,619
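A worked illustration of the cost model and score above, with hypothetical variable sizes (constructing Variable instances requires factored-package internals, so this stays schematic):

// For variables a, b, c with ranges |a| = 3, |b| = 4, |c| = 2:
//   VEGraph.cost(AbstractFactor(List(a, b)))    == 3 * 4     == 12.0
//   VEGraph.cost(AbstractFactor(List(a, b, c))) == 3 * 4 * 2 == 24.0
// Costs of a collection of factors simply add: 12.0 + 24.0 == 36.0.
// score(v) is (cost of the one factor produced by eliminating v) minus
// (cost of the factors v currently appears in), so the cheapest variable
// to eliminate is the one with the lowest score.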
package com.outr.arango.api.model import io.circe.Json case class GraphEdgeDefinition(collection: Option[String] = None, from: Option[List[String]] = None, to: Option[List[String]] = None)
outr/arangodb-scala
api/src/main/scala/com/outr/arango/api/model/GraphEdgeDefinition.scala
Scala
mit
253
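Constructing a definition is plain case-class application; the collection names below are made up:

val knows = GraphEdgeDefinition(
  collection = Some("knows"),
  from = Some(List("persons")),
  to = Some(List("persons")))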
package scdbpf import org.scalatest.{WordSpec, Matchers} import Experimental._ class ExperimentalSpec extends WordSpec with Matchers { "PreviewEffect" should { "be constructible from resource" in { val eff = PreviewEffect(0x12345678, "road_puzzlepiece002") import rapture.core.strategy.throwExceptions BufferedEntry(Tgi.Blank.copy(Tgi.EffDir), eff, compressed = true).toRawEntry } } }
memo33/scdbpf
src/test/scala/scdbpf/ExperimentalSpec.scala
Scala
mit
418
package skarn.push

import skarn.push.PushRequestHandleActorProtocol.Ex
import org.scalatest.{MustMatchers, WordSpecLike}

/**
 * Created by yusuke on 15/07/10.
 */

class GCMJsonTest extends WordSpecLike with MustMatchers {
  "GCMentity" must {
    "serialize to JSON" in {
      import spray.json._
      import GCMProtocol._
      import GCMJsonProtocol._
      GCMEntity(Vector("deviceToken"), None, data = Some(List(Ex("message", """{"message_text":"message"}""")))).toJson.prettyPrint must be(
        """|{
           |  "registration_ids": ["deviceToken"],
           |  "data": {
           |    "message": "{\\"message_text\\":\\"message\\"}"
           |  }
           |}""".stripMargin)
    }
  }

  "List[Ex]" must {
    import spray.json._
    import PushRequestHandleActorJsonFormat._
    import PushRequestHandleActorProtocol._

    "serialize to a single object" in {
      List(Ex("a", "b"), Ex("c", "d"), Ex("e", "f")).toJson.prettyPrint must be(
        """|{
           |  "e": "f",
           |  "c": "d",
           |  "a": "b"
           |}""".stripMargin)
    }

    "parse an array of objects with `key` and `value` fields" in {
      """
        |[
        |  {"key": "e", "value": "f"},
        |  {"key": "c", "value": "d"},
        |  {"key": "a", "value": "b"}
        |]
      """.stripMargin.parseJson.convertTo[List[Ex]] must be(List(Ex("e", "f"), Ex("c", "d"), Ex("a", "b")))
    }
  }
}
trifort/skarn
src/test/scala/skarn/push/GCMJsonTest.scala
Scala
mit
1,419
package com.twitter.finatra.thrift import com.twitter.finagle.ThriftMux import com.twitter.finagle.stats.NullStatsReceiver import com.twitter.finagle.thrift.ClientId import com.twitter.finagle.thrift.service.Filterable import com.twitter.finagle.thrift.service.MethodPerEndpointBuilder import com.twitter.finagle.thrift.service.ServicePerEndpointBuilder import com.twitter.inject.server.EmbeddedTwitterServer import com.twitter.inject.server.PortUtils import com.twitter.inject.server.Ports import com.twitter.inject.server.info import com.twitter.scrooge.AsClosableMethodName import com.twitter.util.Await import com.twitter.util.Closable import com.twitter.util.Duration import com.twitter.util.Future import com.twitter.util.Promise import scala.reflect.ClassTag trait ThriftClient { self: EmbeddedTwitterServer => /* Abstract */ /** * Underlying Embedded TwitterServer exposed as a [[com.twitter.inject.server.Ports]] * @return the underlying TwitterServer as a [[com.twitter.inject.server.Ports]]. */ def twitterServer: Ports /** * The expected flag that sets the external port for serving the underlying Thrift service. * @return a String representing the Thrift port flag. * @see [[com.twitter.app.Flag]] */ def thriftPortFlag: String = "thrift.port" /* Overrides */ /** Logs the external thrift host and port of the underlying embedded TwitterServer */ override protected[twitter] def logStartup(): Unit = { self.logStartup() info(s"ExternalThrift -> thrift://$externalThriftHostAndPort\\n", disableLogging) } /** * Adds the [[thriftPortFlag]] with a value pointing to the ephemeral loopback address to * the list of flags to be passed to the underlying server. * @see [[PortUtils.ephemeralLoopback]] * * @note this flag is also added in the EmbeddedThriftServer constructor but needs to be added * here for when this trait is mixed into an EmbeddedTwitterServer or an EmbeddedHttpServer. * The flags are de-duped prior to starting the underlying server. */ override protected[twitter] def combineArgs(): Array[String] = { s"-$thriftPortFlag=${PortUtils.ephemeralLoopback}" +: self.combineArgs } /* Public */ /** * The base ThriftMux.Client to the underlying Embedded TwitterServer. */ def thriftMuxClient(clientId: String): ThriftMux.Client = { self.start() if (clientId != null && clientId.nonEmpty) { ThriftMux.client .withStatsReceiver(NullStatsReceiver) .withClientId(ClientId(clientId)) } else { ThriftMux.client .withStatsReceiver(NullStatsReceiver) } } def thriftMuxClient: ThriftMux.Client = thriftMuxClient(null) /** * Host and bound external Thrift port combination as a String, e.g., 127.0.0.1:9990. */ lazy val externalThriftHostAndPort: String = PortUtils.loopbackAddressForPort(thriftExternalPort) /* * We need to wait on the external ports to be bound, which can happen after a server * is started and marked as healthy. */ private[this] val ready: Promise[Unit] = EmbeddedTwitterServer.isPortReady( twitterServer, twitterServer.thriftPort.isDefined && twitterServer.thriftPort.get != 0) /** * Bound external Thrift port for the Embedded TwitterServer. * @return the bound external port on which the Embedded TwitterServer is serving the Thrift service. 
*/ def thriftExternalPort: Int = { self.start() // need to wait until we know the ports are bound Await.ready(ready, Duration.fromSeconds(5)) twitterServer.thriftPort.get } /** * Builds a Thrift client to the EmbeddedTwitterServer in the form of the higher-kinded client type * or the method-per-endpoint type, e.g., * * {{{ * val client: MyService.MethodPerEndpoint = * server.thriftClient[MyService.MethodPerEndpoint](clientId = "client123") * * ... or ... * * val client: MyService.MethodPerEndpoint = * server.thriftClient[MyService.MethodPerEndpoint](clientId = "client123") * }}} * * @param clientId the client Id to use in creating the thrift client. * * @return a Finagle Thrift client in the given form. * @see [[com.twitter.finagle.ThriftMux.Client.build(dest: String)]] * @see [[https://twitter.github.io/scrooge/Finagle.html#id1 Scrooge Finagle Integration - MethodPerEndpoint]] */ def thriftClient[ThriftService: ClassTag]( clientId: String = null ): ThriftService = ensureClosedOnExit { thriftMuxClient(clientId) .build[ThriftService](externalThriftHostAndPort) } /** * Builds a Thrift client to the EmbeddedTwitterServer in the form of a service-per-endpoint or a * "Req/Rep" service-per-endpoint. * * {{{ * val client: MyService.ServicePerEndpoint = * server.servicePerEndpoint[MyService.ServicePerEndpoint](clientId = "client123") * * ... or ... * * val client: [MyService.ReqRepServicePerEndpoint = * server.servicePerEndpoint[MyService.ReqRepServicePerEndpoint](clientId = "client123") * }}} * * @param clientId the client Id to use in creating the thrift client. * * @return a Finagle Thrift client in the given form. * @see [[com.twitter.finagle.ThriftMux.Client.servicePerEndpoint]] * @see [[https://twitter.github.io/scrooge/Finagle.html#id2 Scrooge Finagle Integration - ServicePerEndpoint]] * @see [[https://twitter.github.io/scrooge/Finagle.html#id3 Scrooge Finagle Integration - ReqRepServicePerEndpoint]] */ def servicePerEndpoint[ServicePerEndpoint <: Filterable[ServicePerEndpoint]]( clientId: String = null )( implicit builder: ServicePerEndpointBuilder[ServicePerEndpoint] ): ServicePerEndpoint = ensureClosedOnExit { val label = if (clientId != null) clientId else "" thriftMuxClient(clientId) .servicePerEndpoint[ServicePerEndpoint](externalThriftHostAndPort, label) } /** * Builds a Thrift client to the EmbeddedTwitterServer in the form of a `MethodPerEndpoint` which * wraps a given `ServicePerEndpoint`. Converts the `ServicePerEndpoint` to a * `MethodPerEndpoint` interface, e.g., `MyService.MethodPerEndpoint`. * * {{{ * val servicePerEndpoint = MyService.ServicePerEndpoint = * server.servicePerEndpoint[MyService.ServicePerEndpoint](clientId = "client123") * val client: MyService.MethodPerEndpoint = * server.methodPerEndpoint[MyService.ServicePerEndpoint, MyService.MethodPerEndpoint](servicePerEndpoint) * }}} * * This is useful if you want to be able to filter calls to the Thrift service but only want to * expose or interact with the RPC-style (method-per-endpoint) client interface. * * @param servicePerEndpoint the service-per-endpoint to convert to a method-per-endpoint. * * @return a Finagle Thrift client in the `MyService.MethodPerEndpoint` form of a * method-per-endpoint. 
* @see [[com.twitter.finagle.ThriftMux.Client.methodPerEndpoint]] * @see [[https://twitter.github.io/scrooge/Finagle.html#id1 Scrooge Finagle Integration - MethodPerEndpoint]] */ def methodPerEndpoint[ServicePerEndpoint, MethodPerEndpoint]( servicePerEndpoint: ServicePerEndpoint )( implicit builder: MethodPerEndpointBuilder[ServicePerEndpoint, MethodPerEndpoint] ): MethodPerEndpoint = ensureClosedOnExit { ThriftMux.Client .methodPerEndpoint[ServicePerEndpoint, MethodPerEndpoint](servicePerEndpoint) } private[this] def ensureClosedOnExit[T](f: => T): T = { val clnt = f closeOnExit(asClosableThriftService(clnt)) clnt } // copied from our ThriftClientModuleTrait because we can't depend on it here without // creating a circular dependency private[this] def asClosableThriftService(thriftService: Any): Closable = { val close = thriftService match { case closable: Closable => closable case _ => val asClosableMethodOpt = thriftService.getClass.getMethods .find(_.getName == AsClosableMethodName) asClosableMethodOpt match { case Some(method) => try { method.invoke(thriftService).asInstanceOf[Closable] } catch { case _: java.lang.ClassCastException => System.err.println( s"Unable to cast result of ${AsClosableMethodName} invocation to a " + s"${Closable.getClass.getName.dropRight(1)} type." ) Closable.nop } case _ => System.err.println( s"${AsClosableMethodName} not found for instance: ${thriftService.getClass.getName}" ) Closable.nop } } Closable.all( Closable.make { _ => info(s"Closing Embedded ThriftClient '$thriftService''", disableLogging) Future.Done }, close ) } }
twitter/finatra
thrift/src/test/scala/com/twitter/finatra/thrift/ThriftClient.scala
Scala
apache-2.0
8,886
package org.scalameta package build import java.lang.ProcessBuilder._ import java.nio.file._ import java.nio.file.Files._ import scala.collection.JavaConverters._ import sbt._ import sbt.Keys._ import sbt.plugins._ object Build extends AutoPlugin { override def requires: Plugins = JvmPlugin override def trigger: PluginTrigger = allRequirements import autoImport._ object autoImport { trait BenchSuite { def initCommands: List[String] = List( "bench/clean", "wow " + Versions.LatestScala212 ) def metacpBenches: List[String] def metacpCommands: List[String] = { if (metacpBenches.isEmpty) Nil else List("bench/jmh:run " + metacpBenches.mkString(" ")) } def scalacBenches: List[String] def scalacCommands: List[String] = { if (scalacBenches.isEmpty) Nil else List("bench/jmh:run " + scalacBenches.mkString(" ")) } def scalametaBenches: List[String] def scalametaCommands: List[String] = { if (scalametaBenches.isEmpty) Nil else List("bench/jmh:run " + scalametaBenches.mkString(" ")) } final def command: String = { val benchCommands = metacpCommands ++ scalacCommands ++ scalametaCommands (initCommands ++ benchCommands).map(c => s";$c ").mkString("") } } object benchLSP extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = List("ScalacBaseline") def scalametaBenches = List("ScalametaBaseline") } object benchAll extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = List("Scalac") def scalametaBenches = List("Scalameta") } object benchQuick extends BenchSuite { def metacpBenches = List("Metacp") def scalacBenches = Nil def scalametaBenches = List("ScalametaBaseline") } // https://stackoverflow.com/questions/41229451/how-to-disable-slow-tagged-scalatests-by-default-allow-execution-with-option lazy val Fast = config("fast").extend(Test) lazy val Slow = config("slow").extend(Test) lazy val All = config("all").extend(Test) val javacSemanticdbDirectory = SettingKey[File]( "javacSemanticdbDirectory", "location of semanticdb produced by semanticdb-javac") } }
olafurpg/scalameta
project/Build.scala
Scala
bsd-3-clause
2,321
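For orientation, here is what the composed command evaluates to for benchQuick (the concrete Scala version comes from Versions.LatestScala212 and is elided):

benchQuick.command
// => ";bench/clean ;wow <scala-version> ;bench/jmh:run Metacp ;bench/jmh:run ScalametaBaseline "
// (each command is prefixed with ';' and followed by a space, per the mkString above)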
package com.github.andyglow.relaxed

import argonaut._
import Argonaut._

case class ArgonautSupport(json: Json) extends Reader with ReaderSupport[Json] {

  def isNull: (Json) => Boolean = _.isNull
  def ctor: (Json) => ArgonautSupport = ArgonautSupport.apply
  def get(field: String): Option[Json] = json.field(field)

  override def opt[T: Reads](field: String): Option[T] = {
    // Reads[T] instances are produced by readsImpl below, so each one is also a DecodeJson[T].
    implicit val decodeJson: DecodeJson[T] = implicitly[Reads[T]].asInstanceOf[DecodeJson[T]]
    json.field(field) flatMap (_.as[T].toOption)
  }

  override def optOpt[T: Reads](field: String): Option[Option[T]] = {
    implicit val decodeJson: DecodeJson[T] = implicitly[Reads[T]].asInstanceOf[DecodeJson[T]]
    getOpt(field) map {_ flatMap {_.as[T].toOption}}
  }
}

object ArgonautSupport {

  implicit def readsImpl[T](implicit x: DecodeJson[T]): Reads[T] = new DecodeJson[T]() with Reads[T] {
    override def decode(c: HCursor): DecodeResult[T] = x decode c
  }
}
andyglow/relaxed-json-update
argonaut/src/main/scala-2.11/com/github/andyglow/relaxed/ArgonautSupport.scala
Scala
lgpl-3.0
903
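A usage sketch; the field names are invented, and the companion's readsImpl is imported so argonaut DecodeJson instances double as Reads:

import argonaut._, Argonaut._
import com.github.andyglow.relaxed.ArgonautSupport
import com.github.andyglow.relaxed.ArgonautSupport._

val reader = ArgonautSupport(Json("name" := "Ada", "age" := 36))
reader.opt[String]("name")  // Some("Ada")
reader.opt[Int]("missing")  // None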
package org.jetbrains.plugins.scala.failed.resolve import org.jetbrains.plugins.scala.PerfCycleTests import org.junit.experimental.categories.Category /** * @author Nikolay.Tropin */ @Category(Array(classOf[PerfCycleTests])) class JavaFieldResolveTest extends FailedResolveTest("javaField") { def testScl6925() = doTest() def testScl12413() = doTest() def testScl12630() = doTest() }
jastice/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/failed/resolve/JavaFieldResolveTest.scala
Scala
apache-2.0
396
/** * This file is part of mycollab-web. * * mycollab-web is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * mycollab-web is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with mycollab-web. If not, see <http://www.gnu.org/licenses/>. */ package com.esofthead.mycollab.module.project.view.message import com.esofthead.mycollab.common.UrlTokenizer import com.esofthead.mycollab.core.arguments.SetSearchField import com.esofthead.mycollab.eventmanager.EventBusFactory import com.esofthead.mycollab.module.project.domain.criteria.MessageSearchCriteria import com.esofthead.mycollab.module.project.events.ProjectEvent import com.esofthead.mycollab.module.project.view.ProjectUrlResolver import com.esofthead.mycollab.module.project.view.parameters.{MessageScreenData, ProjectScreenData} import com.esofthead.mycollab.vaadin.mvp.PageActionChain /** * @author MyCollab Ltd * @since 5.0.9 */ class MessageUrlResolver extends ProjectUrlResolver { this.addSubResolver("list", new ListUrlResolver) this.addSubResolver("preview", new PreviewUrlResolver) private class ListUrlResolver extends ProjectUrlResolver { protected override def handlePage(params: String*) { val projectId = new UrlTokenizer(params(0)).getInt val messageSearchCriteria = new MessageSearchCriteria messageSearchCriteria.setProjectids(new SetSearchField[Integer](projectId)) val chain = new PageActionChain(new ProjectScreenData.Goto(projectId), new MessageScreenData.Search(messageSearchCriteria)) EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain)) } } private class PreviewUrlResolver extends ProjectUrlResolver { protected override def handlePage(params: String*) { val token = new UrlTokenizer(params(0)) val projectId = token.getInt val messageId = token.getInt val chain = new PageActionChain(new ProjectScreenData.Goto(projectId), new MessageScreenData.Read(messageId)) EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain)) } } }
uniteddiversity/mycollab
mycollab-web/src/main/scala/com/esofthead/mycollab/module/project/view/message/MessageUrlResolver.scala
Scala
agpl-3.0
2,622
/** * This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]]. */ // DO NOT EDIT MANUALLY package sbt.protocol.testing final class TestStringEvent private ( val value: String) extends sbt.protocol.testing.TestMessage() with Serializable { override def equals(o: Any): Boolean = o match { case x: TestStringEvent => (this.value == x.value) case _ => false } override def hashCode: Int = { 37 * (37 * (17 + "TestStringEvent".##) + value.##) } override def toString: String = { value } protected[this] def copy(value: String = value): TestStringEvent = { new TestStringEvent(value) } def withValue(value: String): TestStringEvent = { copy(value = value) } } object TestStringEvent { def apply(value: String): TestStringEvent = new TestStringEvent(value) }
Duhemm/sbt
testing/src/main/contraband-scala/sbt/protocol/testing/TestStringEvent.scala
Scala
bsd-3-clause
845
package com.gmail.at.pukanito.view.metadata.attributedefinition class UpdateAttributeDefinition { }
pukanito/bigdatanalysis
src/main/scala/com/gmail/at/pukanito/view/metadata/attributedefinition/UpdateAttributeDefinition.scala
Scala
gpl-3.0
102
// Project: angulate2 (https://github.com/jokade/angulate2) // Description: // Copyright (c) 2017 Johannes.Kastner <[email protected]> // Distributed under the MIT License (see included LICENSE file) package angulate2.router import de.surfice.smacrotools.createJS import rxjs.ValOrObs @createJS trait Resolve[T] { def resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): ValOrObs[T] }
jokade/angulate2
bindings/src/main/scala/angulate2/router/interfaces.scala
Scala
mit
425
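A hedged sketch of implementing the trait; the resolver class is invented, and it assumes a plain value converts to ValOrObs via the Scala.js union type:

@createJS
class TitleResolver extends Resolve[String] {
  def resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): ValOrObs[String] =
    "Dashboard" // a ready value; an rxjs Observable[String] would work here as well
}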
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.nn

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}

import scala.util.Random

@com.intel.analytics.bigdl.tags.Parallel
class SpatialCrossMapLRNSpec extends FlatSpec with Matchers {
  private def referenceLRNForwardAcrossChannels(
    input: Tensor[Double], alpha: Double, beta: Double, size: Int): Tensor[Double] = {
    val output = Tensor[Double]()
    output.resizeAs(input)
    val batch = input.size(1)
    val channel = input.size(2)
    val height = input.size(3)
    val width = input.size(4)

    for (n <- 0 until batch) {
      for (c <- 0 until channel) {
        for (h <- 0 until height) {
          for (w <- 0 until width) {
            var cStart = c - (size - 1) / 2
            val cEnd = math.min(cStart + size, channel)
            cStart = math.max(cStart, 0)
            var scale = 1.0
            for (i <- cStart until cEnd) {
              val value = input.valueAt(n + 1, i + 1, h + 1, w + 1)
              scale += value * value * alpha / size
            }
            output.setValue(n + 1, c + 1, h + 1, w + 1,
              input.valueAt(n + 1, c + 1, h + 1, w + 1) * math.pow(scale, -beta))
          }
        }
      }
    }

    output
  }

  private def referenceLRNForwardAcrossChannels(
    input: Tensor[Float], alpha: Float, beta: Float, size: Int): Tensor[Float] = {
    val output = Tensor[Float]()
    output.resizeAs(input)
    val batch = input.size(1)
    val channel = input.size(2)
    val height = input.size(3)
    val width = input.size(4)

    for (n <- 0 until batch) {
      for (c <- 0 until channel) {
        for (h <- 0 until height) {
          for (w <- 0 until width) {
            var cStart = c - (size - 1) / 2
            val cEnd = math.min(cStart + size, channel)
            cStart = math.max(cStart, 0)
            var scale = 1.0f
            for (i <- cStart until cEnd) {
              val value = input.valueAt(n + 1, i + 1, h + 1, w + 1)
              scale += value * value * alpha / size
            }
            output.setValue(n + 1, c + 1, h + 1, w + 1,
              input.valueAt(n + 1, c + 1, h + 1, w + 1) * math.pow(scale, -beta).toFloat)
          }
        }
      }
    }

    output
  }

  "LocalNormalizationAcrossChannels Forward Double" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Double](5, 0.0001, 0.75, 1.0)
    val input = Tensor[Double](2, 7, 3, 3)
    input.rand()
    val outputRef = referenceLRNForwardAcrossChannels(input, 0.0001, 0.75, 5)
    layer.forward(input)
    val output = layer.forward(input)
    output should be(outputRef)
  }

  "LocalNormalizationAcrossChannels Backward Double" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Double](5, 0.0001, 0.75, 1.0)
    val input = Tensor[Double](2, 7, 3, 3)
    input.rand()
    val checker = new GradientChecker(1e-2, 1e-2)
    checker.checkLayer(layer, input) should be(true)
  }

  "LocalNormalizationAcrossChannels Backward Float" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Float](5, 0.0001, 0.75, 1.0)
    val input = Tensor[Float](2, 7, 3, 3)
    input.rand()
    val checker = new GradientChecker(1e-2, 1e-2)
    checker.checkLayer[Float](layer, input) should be(true)
  }

  "LocalNormalizationAcrossChannels with Large Region Backward Double" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Double](15, 0.0001, 0.75, 1.0)
    val input = Tensor[Double](2, 7, 3, 3)
    input.rand()
    val checker = new GradientChecker(1e-2, 1e-2)
    checker.checkLayer(layer, input) should be(true)
  }

  "LocalNormalizationAcrossChannels with Large Region Backward Float" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Float](15, 0.0001, 0.75, 1.0)
    val input = Tensor[Float](2, 7, 3, 3)
    input.rand()
    val checker = new GradientChecker(1e-2, 1e-2)
    checker.checkLayer(layer, input) should be(true)
  }

  "LocalNormalizationAcrossChannels with Large Region Forward Double" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Double](15, 0.0001, 0.75, 1.0)
    val input = Tensor[Double](2, 7, 3, 3)
    input.rand()
    val outputRef = referenceLRNForwardAcrossChannels(input, 0.0001, 0.75, 15)
    val output = layer.forward(input)
    output should be(outputRef)
  }

  "LocalNormalizationAcrossChannels Forward Float" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Float](5, 0.0001f, 0.75f, 1.0f)
    val input = Tensor[Float](2, 7, 3, 3)
    input.rand()
    val outputRef = referenceLRNForwardAcrossChannels(input, 0.0001f, 0.75f, 5)
    val output = layer.forward(input)
    output should be(outputRef)
  }

  "LocalNormalizationAcrossChannels with Large Region Forward Float" should "be correct" in {
    val layer = new SpatialCrossMapLRN[Float](15, 0.0001f, 0.75f, 1.0f)
    val input = Tensor[Float](2, 7, 3, 3)
    input.rand()
    val outputRef = referenceLRNForwardAcrossChannels(input, 0.0001f, 0.75f, 15)
    val output = layer.forward(input)
    output should be(outputRef)
  }
}

class SpatialCrossMapLRNSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val spatialCrossMapLRN = SpatialCrossMapLRN[Float](5, 0.01, 0.75, 1.0).
      setName("spatialCrossMapLRN")
    val input = Tensor[Float](2, 2, 2, 2).apply1(e => Random.nextFloat())
    runSerializationTest(spatialCrossMapLRN, input)
  }
}
yiheng/BigDL
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/SpatialCrossMapLRNSpec.scala
Scala
apache-2.0
6,119
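The reference implementation in the spec above computes the standard cross-map LRN response: each activation is scaled by (1 + (alpha/size) * sum of squared activations over a window of `size` neighbouring channels) raised to the power -beta. A minimal, dependency-free sketch of the same arithmetic on a single spatial position's channel vector (an illustrative helper, not part of BigDL):

// Cross-map LRN for one spatial position's channel vector, with the same
// windowing as the reference above: a window of `size` channels centred on c,
// with the unclamped start also determining the end before clipping.
def lrnAcrossChannels(x: Array[Double], alpha: Double, beta: Double, size: Int): Array[Double] =
  x.indices.map { c =>
    val start = c - (size - 1) / 2
    val cStart = math.max(start, 0)
    val cEnd = math.min(start + size, x.length)
    // scale = 1 + (alpha / size) * sum of squares over the channel window
    val scale = 1.0 + (cStart until cEnd).map(i => x(i) * x(i)).sum * alpha / size
    x(c) * math.pow(scale, -beta)
  }.toArray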
package com.twitter.finagle.serverset2

import com.twitter.finagle.common.io.JsonCodec
import com.twitter.finagle.common.zookeeper.ServerSets
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.serverset2.ServiceDiscoverer.ClientHealth
import com.twitter.finagle.serverset2.ZkOp.{GetData, GetChildrenWatch, ExistsWatch}
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.serverset2.client._
import com.twitter.finagle.util.DefaultTimer
import com.twitter.io.Buf
import com.twitter.io.Buf.ByteArray
import com.twitter.thrift
import com.twitter.thrift.ServiceInstance
import com.twitter.util._
import org.mockito.Mockito.when
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.FunSuite
import org.scalatestplus.mockito.MockitoSugar
import java.util.concurrent.atomic.AtomicReference

class ServiceDiscovererTest
    extends FunSuite
    with MockitoSugar
    with Eventually
    with IntegrationPatience {

  class ServiceDiscovererWithExposedCache(
    varZkSession: Var[ZkSession],
    statsReceiver: StatsReceiver,
    timer: Timer = DefaultTimer)
      extends ServiceDiscoverer(varZkSession, statsReceiver, ForeverEpoch, timer) {
    val cache = new ZkEntryCache("/foo/bar", NullStatsReceiver)
    cache.setSession(varZkSession.sample)

    override val entriesOf = Memoize { path: String =>
      entitiesOf(path, cache, NullStatsReceiver.stat("meh"), ServiceDiscoverer.EndpointGlob)
    }
  }

  def ep(port: Int) =
    Endpoint(Array(null), "localhost", port, Int.MinValue, Endpoint.Status.Alive, port.toString)

  val ForeverEpoch = Epoch(Duration.Top, new MockTimer)
  val retryStream = RetryStream()

  def createEntry(id: Int): Buf = {
    val jsonCodec = JsonCodec.create(classOf[ServiceInstance])
    val serviceInstance = new ServiceInstance()
    serviceInstance.setShard(1)
    serviceInstance.setStatus(thrift.Status.ALIVE)
    serviceInstance.setServiceEndpoint(new thrift.Endpoint(s"$id.0.0.12", 32123))
    ByteArray.Owned(ServerSets.serializeServiceInstance(serviceInstance, jsonCodec))
  }

  test("ServiceDiscoverer.zipWithWeights") {
    val port1 = 80 // not bound
    val port2 = 53 // ditto
    val ents = Seq[Entry](ep(port1), ep(port2), ep(3), ep(4))
    val v1 = Vector(
      Seq(
        Descriptor(Selector.Host("localhost", port1), 1.1, 1),
        Descriptor(Selector.Host("localhost", port2), 1.4, 1),
        Descriptor(Selector.Member("3"), 3.1, 1)
      )
    )
    val v2 = Vector(Seq(Descriptor(Selector.Member(port2.toString), 2.0, 1)))
    val vecs = Seq(v1, v2)

    assert(
      ServiceDiscoverer
        .zipWithWeights(ents, vecs.toSet)
        .toSet == Set(ep(port1) -> 1.1, ep(port2) -> 2.8, ep(3) -> 3.1, ep(4) -> 1.0)
    )
  }

  test("New observation do not cause reads; entries are cached") {
    implicit val timer = new MockTimer
    val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val sd = new ServiceDiscoverer(
      Var.value(new ZkSession(retryStream, watchedZk, NullStatsReceiver)),
      NullStatsReceiver,
      ForeverEpoch,
      timer
    )

    val f1 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()

    val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
    val ewwatchv = Var[WatchState](WatchState.Pending)
    ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

    val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
    gw.res() = Return(Watched(Node.Children(Seq("member_1"), null), Var.value(WatchState.Pending)))

    assert(!f1.isDefined)

    val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
    gd.res() = Return(Node.Data(None, null))
    assert(f1.isDefined)

    val f2 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()
    assert(f2.isDefined)
  }

  test("Removed entries are removed from cache") {
    implicit val timer = new MockTimer
    val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val sd = new ServiceDiscovererWithExposedCache(
      Var.value(new ZkSession(retryStream, watchedZk, NullStatsReceiver)),
      NullStatsReceiver
    )

    val f1 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()
    val cache = sd.cache

    val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
    val ewwatchv = Var[WatchState](WatchState.Pending)
    ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

    assert(cache.keys == Set.empty)

    val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
    gw.res() = Return(
      Watched(
        Node.Children(Seq("member_1"), null),
        Var.value(new WatchState.Determined(NodeEvent.Created))
      )
    )

    val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
    gd.res() = Return(Node.Data(None, null))

    assert(cache.keys == Set("member_1"))

    val gw2 @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(3)
    gw2.res() = Return(
      Watched(
        Node.Children(Seq.empty, null),
        Var.value(new WatchState.Determined(NodeEvent.Created))
      )
    )

    assert(cache.keys == Set.empty)

    val gw3 @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(4)
    gw3.res() = Return(
      Watched(
        Node.Children(Seq("member_2"), null),
        Var.value(new WatchState.Determined(NodeEvent.Created))
      )
    )

    val gd2 @ GetData("/foo/bar/member_2") = watchedZk.value.opq(5)
    gd2.res() = Return(Node.Data(None, null))

    assert(cache.keys == Set("member_2"))

    val gw4 @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(6)
    gw4.res() = Return(
      Watched(
        Node.Children(Seq("member_3", "member_4"), null),
        Var.value(new WatchState.Determined(NodeEvent.Created))
      )
    )

    val gd3 @ GetData("/foo/bar/member_3") = watchedZk.value.opq(7)
    gd3.res() = Return(Node.Data(None, null))
    val gd4 @ GetData("/foo/bar/member_4") = watchedZk.value.opq(8)
    gd4.res() = Return(Node.Data(None, null))

    assert(cache.keys == Set("member_3", "member_4"))
  }

  test("If all reads fail the serverset is in Failed state") {
    implicit val timer = new MockTimer
    val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val sd = new ServiceDiscovererWithExposedCache(
      Var.value(new ZkSession(retryStream, watchedZk, NullStatsReceiver)),
      NullStatsReceiver,
      timer
    )

    val f1 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()
    val cache = sd.cache

    val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
    val ewwatchv = Var[WatchState](WatchState.Pending)
    ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

    val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
    gw.res() = Return(
      Watched(
        Node.Children(Seq("member_1", "member_2"), null),
        Var.value(new WatchState.Determined(NodeEvent.Created))
      )
    )

    val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
    gd.res() = Throw(new Exception)

    val gd2 @ GetData("/foo/bar/member_2") = watchedZk.value.opq(3)
    gd2.res() = Throw(new Exception)

    Await.result(f1, 1.second) match {
      case Activity.Failed(ServiceDiscoverer.EntryLookupFailureException) => // great!
      case other => fail(s"Expected entry lookup exception. Received $other")
    }
  }

  test("Partial failures are successful and retried") {
    Time.withCurrentTimeFrozen { timeControl =>
      implicit val timer = new MockTimer
      val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
      val sd = new ServiceDiscovererWithExposedCache(
        Var.value(new ZkSession(retryStream, watchedZk, NullStatsReceiver)),
        NullStatsReceiver,
        timer
      )

      val currentValue = new AtomicReference[Activity.State[Seq[(Entry, Double)]]]
      // Test will become flaky if we don't capture this as the Closeable will be occasionally
      // closed by the CollectCloseables thread
      val holdRef =
        sd("/foo/bar").states.filter(_ != Activity.Pending).register(Witness(currentValue))
      val cache = sd.cache

      val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
      val ewwatchv = Var[WatchState](WatchState.Pending)
      ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

      val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
      gw.res() = Return(
        Watched(Node.Children(Seq("member_1", "member_2"), null), Var.value(WatchState.Pending))
      )

      val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
      gd.res() = Throw(new Exception)

      val gd2 @ GetData("/foo/bar/member_2") = watchedZk.value.opq(3)
      gd2.res() = Return(Node.Data(Some(createEntry(1)), null))

      // Should succeed with only 1 resolved value
      eventually {
        currentValue.get match {
          case Activity.Ok(seq) => assert(seq.size == 1) // member_2 has good data
          case other => fail(s"Expected entry lookup exception. Received $other")
        }
      }

      // member_1 will be requeried for eventually
      eventually {
        timeControl.advance(2.minutes)
        timer.tick()
        val gd3 @ GetData("/foo/bar/member_1") = watchedZk.value.opq(4)
        gd3.res() = Return(Node.Data(Some(createEntry(2)), null))
      }

      // Then we should see 2 values in the serverset
      currentValue.get match {
        case Activity.Ok(seq) => assert(seq.size == 2) // both have good values now
        case other => fail(s"Expected entry lookup exception. Received $other")
      }
    }
  }

  test("Consecutive observations do not cause reads; entries are cached") {
    implicit val timer = new MockTimer
    val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val sd = new ServiceDiscoverer(
      Var.value(new ZkSession(retryStream, watchedZk, NullStatsReceiver)),
      NullStatsReceiver,
      ForeverEpoch,
      timer
    )

    val f1 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()
    val f2 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()

    val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
    val ewwatchv = Var[WatchState](WatchState.Pending)
    ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

    val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
    gw.res() = Return(Watched(Node.Children(Seq("member_1"), null), Var.value(WatchState.Pending)))

    assert(!f1.isDefined)
    assert(!f2.isDefined)

    val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
    gd.res() = Return(Node.Data(None, null))

    // ensure that we are hitting the cache: even though we called
    // GetData only once, the two observations are fulfilled.
    assert(f1.isDefined)
    assert(f2.isDefined)
  }

  test("New sessions are used") {
    implicit val timer = new MockTimer
    val fakeWatchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val watchedZk = Watched(new OpqueueZkReader(), Var(WatchState.Pending))
    val watchedZkVar =
      new ReadWriteVar(new ZkSession(retryStream, fakeWatchedZk, NullStatsReceiver))
    val sd = new ServiceDiscoverer(watchedZkVar, NullStatsReceiver, ForeverEpoch, timer)

    val f1 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()
    val f2 = sd("/foo/bar").states.filter(_ != Activity.Pending).toFuture()

    watchedZkVar.update(new ZkSession(retryStream, watchedZk, NullStatsReceiver))

    val ew @ ExistsWatch("/foo/bar") = watchedZk.value.opq(0)
    val ewwatchv = Var[WatchState](WatchState.Pending)
    ew.res() = Return(Watched(Some(Data.Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), ewwatchv))

    val gw @ GetChildrenWatch("/foo/bar") = watchedZk.value.opq(1)
    gw.res() = Return(Watched(Node.Children(Seq("member_1"), null), Var.value(WatchState.Pending)))

    assert(!f1.isDefined)
    assert(!f2.isDefined)

    val gd @ GetData("/foo/bar/member_1") = watchedZk.value.opq(2)
    gd.res() = Return(Node.Data(None, null))

    // ensure that we are hitting the cache: even though we called
    // GetData only once, the two observations are fulfilled.
    assert(f1.isDefined)
    assert(f2.isDefined)
  }

  def newZkSession(): (ZkSession, Witness[WatchState]) = {
    val mockZkSession = mock[ZkSession]
    val watchStateEvent = Event[WatchState]()
    val watchStateVar = Var[WatchState](WatchState.Pending, watchStateEvent)
    when(mockZkSession.state).thenReturn(watchStateVar)
    (mockZkSession, watchStateEvent)
  }

  test("ServiceDiscoverer stable health is reported correctly") {
    Time.withCurrentTimeFrozen { timeControl =>
      val zkSession = Event[ZkSession]()
      val varZkSession = Var[ZkSession](ZkSession.nil, zkSession)
      val period = 1.second
      implicit val timer = new MockTimer
      val sd = new ServiceDiscoverer(varZkSession, NullStatsReceiver, Epoch(period, timer), timer)
      val stabilizedHealth = new AtomicReference[ClientHealth](ClientHealth.Healthy)
      sd.health.changes.register(Witness { stabilizedHealth })

      // should start as unknown until updated otherwise
      assert(stabilizedHealth.get == ClientHealth.Unknown)

      val (session1, state1) = newZkSession()
      state1.notify(WatchState.SessionState(SessionState.SyncConnected))
      zkSession.notify(session1)
      assert(stabilizedHealth.get == ClientHealth.Healthy)

      // make unhealthy without turning the clock
      state1.notify(WatchState.SessionState(SessionState.Expired))
      assert(stabilizedHealth.get == ClientHealth.Healthy)
      timer.tick()

      // advance past the health period to make the stabilized health unhealthy
      timeControl.advance(period)
      timer.tick()
      assert(stabilizedHealth.get == ClientHealth.Unhealthy)

      // flip to a new session
      val (session2, state2) = newZkSession()
      state2.notify(WatchState.SessionState(SessionState.SyncConnected))
      zkSession.notify(session2)
      assert(stabilizedHealth.get == ClientHealth.Healthy)
    }
  }

  test("ServiceDiscoverer rawHealth is reported correctly") {
    val zkSession = Event[ZkSession]()
    val varZkSession = Var[ZkSession](ZkSession.nil, zkSession)
    val sd = new ServiceDiscoverer(varZkSession, NullStatsReceiver, ForeverEpoch, DefaultTimer)
    val health = new AtomicReference[ClientHealth](ClientHealth.Healthy)
    sd.rawHealth.changes.register(Witness { health })

    // should start as unknown until updated otherwise
    assert(health.get == ClientHealth.Unknown)

    val (session1, state1) = newZkSession()
    state1.notify(WatchState.SessionState(SessionState.SyncConnected))
    zkSession.notify(session1)
    assert(health.get == ClientHealth.Healthy)

    // make unhealthy
    state1.notify(WatchState.SessionState(SessionState.Expired))
    assert(health.get == ClientHealth.Unhealthy)

    // flip to a new session
    val (session2, state2) = newZkSession()
    state2.notify(WatchState.SessionState(SessionState.SyncConnected))
    zkSession.notify(session2)
    assert(health.get == ClientHealth.Healthy)

    // pulse the bad session (which is NOT the current session) and ensure we stay healthy
    state1.notify(WatchState.SessionState(SessionState.Disconnected))
    assert(health.get == ClientHealth.Healthy)

    // pulse the current session with an event that should be ignored
    state2.notify(WatchState.Pending)
    assert(health.get == ClientHealth.Healthy)
  }
}
luciferous/finagle
finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/ServiceDiscovererTest.scala
Scala
apache-2.0
15,603
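A recurring pattern in the test above is observing a com.twitter.util.Var by registering a Witness backed by an AtomicReference, then asserting on the captured value. A minimal sketch of that pattern in isolation (assuming only util-core; the names here are illustrative):

import com.twitter.util.{Var, Witness}
import java.util.concurrent.atomic.AtomicReference

object VarObservationSketch extends App {
  val state = Var(0)
  val current = new AtomicReference[Int](0)
  // register returns a Closable; keep a reference so the observation stays alive
  val closable = state.changes.register(Witness(current))
  state() = 42
  assert(current.get == 42)
}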
Here are a few properties:

* The sum of the empty list is 0
* The sum of a list whose elements are all equal to `x` is just the list's length
  multiplied by `x`. We might express this as: `sum(List.fill(n)(x)) == n*x`
* For any list, `l`, `sum(l) == sum(l.reverse)`, since addition is commutative
* Given a list, `List(x,y,z,p,q)`, `sum(List(x,y,z,p,q)) == sum(List(x,y)) + sum(List(z,p,q))`,
  since addition is associative. More generally, we can partition a list into two
  subsequences whose sum is equal to the sum of the overall list.
* The sum of 1,2,3...n is `n*(n+1)/2`
galarragas/FpInScala
answerkey/testing/1.answer.scala
Scala
mit
574
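These properties translate directly into property-based tests. A sketch using ScalaCheck (the `sum` here is a stand-in for whatever implementation is under test; generator bounds are chosen to avoid Int overflow):

import org.scalacheck.{Gen, Prop, Properties}

object SumSpecification extends Properties("sum") {
  def sum(l: List[Int]): Int = l.sum // stand-in for the implementation under test

  property("empty list sums to 0") = Prop.propBoolean(sum(Nil) == 0)

  property("constant list") = Prop.forAll(Gen.choose(0, 100), Gen.choose(-1000, 1000)) {
    (n, x) => sum(List.fill(n)(x)) == n * x
  }

  property("reverse invariant") = Prop.forAll { l: List[Int] => sum(l) == sum(l.reverse) }

  property("partition additivity") = Prop.forAll { (l1: List[Int], l2: List[Int]) =>
    sum(l1 ++ l2) == sum(l1) + sum(l2)
  }

  property("1..n") = Prop.forAll(Gen.choose(0, 1000)) { n =>
    sum((1 to n).toList) == n * (n + 1) / 2
  }
}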
package dao.generic

import generated.Tables.profile.api._
import play.api.db.slick._
import slick.jdbc.JdbcProfile
import slick.lifted.CanBeQueryCondition

import scala.concurrent._

/**
 * Generic DAO definition
 */
trait GenericDao[T <: Table[E] with IdentifyableTable[PK], E <: Entity[PK], PK]
    extends HasDatabaseConfigProvider[JdbcProfile] {
  //------------------------------------------------------------------------
  // public
  //------------------------------------------------------------------------
  /**
   * Returns the row count for this Model
   * @return the row count for this Model
   */
  def count(): Future[Int]

  //------------------------------------------------------------------------
  /**
   * Returns the matching entity for the given id
   * @param id identifier
   * @return the matching entity for the given id
   */
  def findById(id: PK): Future[Option[E]]

  //------------------------------------------------------------------------
  /**
   * Returns all entities in this model
   * @return all entities in this model
   */
  def findAll(): Future[Seq[E]]

  //------------------------------------------------------------------------
  /**
   * Returns entities that satisfy the filter expression.
   * @param expr input filter expression
   * @param wt evidence that `C` can be used as a query condition
   * @tparam C type of the filter expression's result
   * @return entities that satisfy the filter expression.
   */
  def filter[C <: Rep[_]](expr: T => C)(implicit wt: CanBeQueryCondition[C]): Future[Seq[E]]

  //------------------------------------------------------------------------
  /**
   * Creates (and forgets) a new entity, returns a unit future
   * @param entity entity to create, input id is ignored
   * @return returns a unit future
   */
  def create(entity: E): Future[Unit]

  //------------------------------------------------------------------------
  /**
   * Creates (and forgets) the given entities, returns a unit future
   * @param entities entities to be inserted
   * @return returns a unit future
   */
  def create(entities: Seq[E]): Future[Unit]

  //------------------------------------------------------------------------
  /**
   * Updates the given entity and returns a Future
   * @param update Entity to update (by id)
   * @return returns a Future
   */
  def update(update: E): Future[Unit]

  //------------------------------------------------------------------------
  /**
   * Deletes the given entity by Id and returns a Future
   * @param id The Id to delete
   * @return returns a Future
   */
  def delete(id: PK): Future[Unit]

  //------------------------------------------------------------------------
  /**
   * Deletes all entities and returns a Future
   * @return returns a Future
   */
  def deleteAll: Future[Unit]
}
bravegag/play-authenticate-usage-scala
app/dao/generic/GenericDao.scala
Scala
apache-2.0
2,734
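The `filter` signature above relies on Slick's CanBeQueryCondition so callers can pass either a Rep[Boolean] or a Rep[Option[Boolean]] expression. A self-contained sketch of the same mechanism outside the DAO (the Users table here is illustrative, not from this repository):

import slick.jdbc.H2Profile.api._
import slick.lifted.CanBeQueryCondition

object FilterSketch {
  class Users(tag: Tag) extends Table[(Long, String)](tag, "users") {
    def id = column[Long]("id", O.PrimaryKey)
    def name = column[String]("name")
    def * = (id, name)
  }
  val users = TableQuery[Users]

  // Same shape as GenericDao.filter: any condition-like expression is accepted.
  def filterQuery[C <: Rep[_]](expr: Users => C)(implicit wt: CanBeQueryCondition[C]) =
    users.filter(expr)

  val q = filterQuery(_.name === "alice") // Query[Users, (Long, String), Seq]
}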
/**
 * Copyright 2012-2013 StackMob
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.stackmob.newman.serialization.common

import scalaz._
import Scalaz._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.net.URL

object URLSerialization extends SerializationBase[URL] {

  implicit override val writer = new JSONW[URL] {
    override def write(u: URL): JValue = JString(u.toString)
  }

  implicit override val reader = new JSONR[URL] {
    override def read(jValue: JValue): Result[URL] = jValue match {
      case JString(s) => new URL(s).successNel[Error]
      case j => UnexpectedJSONError(j, classOf[JString]).failureNel[URL]
    }
  }
}
megamsys/newman
src/main/scala/com/stackmob/newman/serialization/common/URLSerialization.scala
Scala
apache-2.0
1,198
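A round trip through this serializer, assuming lift-json-scalaz's toJSON/fromJSON entry points pick up the implicit writer and reader (a sketch only; verify the imports against the newman build):

import java.net.URL
import net.liftweb.json.scalaz.JsonScalaz._
import com.stackmob.newman.serialization.common.URLSerialization.{reader, writer}

object UrlRoundTripSketch extends App {
  val url = new URL("http://stackmob.com")
  val json = toJSON(url)           // JString("http://stackmob.com")
  val parsed = fromJSON[URL](json) // Success(url); non-JString input yields a failure NEL
  println(parsed)
}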
/*
 Copyright 2013 Twitter, Inc.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */

package com.twitter.summingbird.online.executor

import com.twitter.summingbird.online.Queue
import com.twitter.summingbird.online.option.{ MaxWaitingFutures, MaxFutureWaitTime, MaxEmitPerExecute }
import com.twitter.util.{ Await, Duration, Future }
import scala.util.{ Try, Success, Failure }
import java.util.concurrent.TimeoutException
import org.slf4j.{ LoggerFactory, Logger }

abstract class AsyncBase[I, O, S, D, RC](
  maxWaitingFutures: MaxWaitingFutures,
  maxWaitingTime: MaxFutureWaitTime,
  maxEmitPerExec: MaxEmitPerExecute)
    extends Serializable with OperationContainer[I, O, S, D, RC] {

  @transient protected lazy val logger: Logger = LoggerFactory.getLogger(getClass)

  /**
   * If you can use Future.value below, do so. The double Future is here to deal with
   * cases that need to complete operations after or before doing a FlatMapOperation or
   * doing a store merge
   */
  def apply(state: S, in: I): Future[TraversableOnce[(Seq[S], Future[TraversableOnce[O]])]]
  def tick: Future[TraversableOnce[(Seq[S], Future[TraversableOnce[O]])]] = Future.value(Nil)

  private lazy val outstandingFutures = Queue.linkedNonBlocking[Future[Unit]]
  private lazy val responses = Queue.linkedNonBlocking[(Seq[S], Try[TraversableOnce[O]])]

  override def executeTick =
    finishExecute(tick.onFailure { thr => responses.put(((Seq(), Failure(thr)))) })

  override def execute(state: S, data: I) =
    finishExecute(apply(state, data).onFailure { thr => responses.put(((List(state), Failure(thr)))) })

  private def finishExecute(fIn: Future[TraversableOnce[(Seq[S], Future[TraversableOnce[O]])]]) = {
    addOutstandingFuture(handleSuccess(fIn).unit)

    // always empty the responses
    emptyQueue
  }

  private def handleSuccess(fut: Future[TraversableOnce[(Seq[S], Future[TraversableOnce[O]])]]) =
    fut.onSuccess { iter: TraversableOnce[(Seq[S], Future[TraversableOnce[O]])] =>

      // Collect the result onto our responses
      val iterSize = iter.foldLeft(0) {
        case (iterSize, (tups, res)) =>
          res.onSuccess { t => responses.put(((tups, Success(t)))) }
          res.onFailure { t => responses.put(((tups, Failure(t)))) }
          // Make sure there are not too many outstanding:
          if (addOutstandingFuture(res.unit)) {
            iterSize + 1
          } else {
            iterSize
          }
      }
      if (outstandingFutures.size > maxWaitingFutures.get) {
        /*
         * This can happen on large key expansion.
         * May indicate maxWaitingFutures is too low.
         */
        logger.debug(
          "Exceeded maxWaitingFutures({}), put {} futures",
          maxWaitingFutures.get,
          iterSize
        )
      }
    }

  private def addOutstandingFuture(fut: Future[Unit]): Boolean =
    if (!fut.isDefined) {
      outstandingFutures.put(fut)
      true
    } else {
      false
    }

  private def forceExtraFutures {
    outstandingFutures.dequeueAll(_.isDefined)
    val toForce = outstandingFutures.trimTo(maxWaitingFutures.get).toIndexedSeq
    if (!toForce.isEmpty) {
      try {
        Await.ready(Future.collect(toForce), maxWaitingTime.get)
      } catch {
        case te: TimeoutException =>
          logger.error("forceExtra failed on %d Futures".format(toForce.size), te)
      }
    }
  }

  private def emptyQueue = {
    // don't let too many futures build up
    forceExtraFutures
    // Take all results that have been placed for writing to the network
    responses.take(maxEmitPerExec.get)
  }
}
zirpins/summingbird
summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/AsyncBase.scala
Scala
apache-2.0
4,046
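The core back-pressure idea in AsyncBase — remember incomplete futures and force completion once too many are outstanding — can be shown with plain collections. A generic sketch (not AsyncBase itself; a real implementation would catch the TimeoutException the way the class above does):

import com.twitter.util.{Await, Duration, Future}
import scala.collection.mutable

// Remember incomplete futures; once the limit is exceeded, block on the
// oldest overflow so unbounded work cannot accumulate.
class BoundedOutstanding(max: Int, wait: Duration) {
  private val outstanding = mutable.Queue.empty[Future[Unit]]

  def add(f: Future[Unit]): Unit = {
    if (!f.isDefined) outstanding.enqueue(f)
    outstanding.dequeueAll(_.isDefined) // drop already-completed futures
    while (outstanding.size > max) {
      // force the oldest future, bounded by the configured wait;
      // Await.ready throws TimeoutException if the wait elapses
      Await.ready(outstanding.dequeue(), wait)
    }
  }
}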
package com.twitter.finatra.http.tests.integration.doeverything.main.jsonpatch

case class Level0CaseClass(level0: Level1CaseClass)
case class Level1CaseClass(level1: Level2CaseClass)
case class Level2CaseClass(level2: ExampleCaseClass)
case class ExampleCaseClass(hello: String)

case class RootCaseClass(root: DuoCaseClass)
case class DuoCaseClass(left: DuoStringCaseClass, right: DuoStringCaseClass)
case class DuoStringCaseClass(left: String, right: String)

case class InnerSeqCaseClass(bears: Seq[String])

case class NestedSeqCaseClass(animalFamilies: Seq[AnimalFamily])
case class AnimalFamily(name: String, animals: Seq[String])
twitter/finatra
http-server/src/test/scala/com/twitter/finatra/http/tests/integration/doeverything/main/jsonpatch/ExampleCaseClass.scala
Scala
apache-2.0
635
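For context, these fixtures model the nesting exercised by JSON Patch (RFC 6902) tests: assuming standard field-name JSON serialization, the innermost string is addressed by the pointer /level0/level1/level2/hello. An illustrative construction:

// Illustrative only: the nesting these fixtures encode.
val nested = Level0CaseClass(Level1CaseClass(Level2CaseClass(ExampleCaseClass("hi"))))
// A patch op such as {"op": "replace", "path": "/level0/level1/level2/hello", "value": "bye"}
// walks level0 -> level1 -> level2 -> hello through the structure above.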
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.parser

import java.util.Locale

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition.{after, first}
import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
import org.apache.spark.unsafe.types.UTF8String

class DDLParserSuite extends AnalysisTest {
  import CatalystSqlParser._

  private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = {
    val e = intercept[ParseException] {
      parsePlan(sql)
    }
    assert(e.getMessage.toLowerCase(Locale.ROOT).contains("operation not allowed"))
    containsThesePhrases.foreach { p =>
      assert(e.getMessage.toLowerCase(Locale.ROOT).contains(p.toLowerCase(Locale.ROOT)))
    }
  }

  private def intercept(sqlCommand: String, messages: String*): Unit =
    interceptParseException(parsePlan)(sqlCommand, messages: _*)

  private def parseCompare(sql: String, expected: LogicalPlan): Unit = {
    comparePlans(parsePlan(sql), expected, checkAnalysis = false)
  }

  test("create/replace table using - schema") {
    val createSql = "CREATE TABLE my_tab(a INT COMMENT 'test', b STRING NOT NULL) USING parquet"
    val replaceSql = "REPLACE TABLE my_tab(a INT COMMENT 'test', b STRING NOT NULL) USING parquet"
    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType()
        .add("a", IntegerType, nullable = true, "test")
        .add("b", StringType, nullable = false)),
      Seq.empty[Transform],
      None,
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)

    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }

    intercept("CREATE TABLE my_tab(a: INT COMMENT 'test', b: STRING) USING parquet",
      "no viable alternative at input")
  }

  test("create/replace table - with IF NOT EXISTS") {
    val sql = "CREATE TABLE IF NOT EXISTS my_tab(a INT, b STRING) USING parquet"
    testCreateOrReplaceDdl(
      sql,
      TableSpec(
        Seq("my_tab"),
        Some(new StructType().add("a", IntegerType).add("b", StringType)),
        Seq.empty[Transform],
        None,
        Map.empty[String, String],
        Some("parquet"),
        Map.empty[String, String],
        None,
        None),
      expectedIfNotExists = true)
  }

  test("create/replace table - with partitioned by") {
    val createSql = "CREATE TABLE my_tab(a INT comment 'test', b STRING) " +
      "USING parquet PARTITIONED BY (a)"
    val replaceSql = "REPLACE TABLE my_tab(a INT comment 'test', b STRING) " +
      "USING parquet PARTITIONED BY (a)"
    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType()
        .add("a", IntegerType, nullable = true, "test")
        .add("b", StringType)),
      Seq(IdentityTransform(FieldReference("a"))),
      None,
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - partitioned by transforms") {
    val createSql =
      """
        |CREATE TABLE my_tab (a INT, b STRING, ts TIMESTAMP) USING parquet
        |PARTITIONED BY (
        |    a,
        |    bucket(16, b),
        |    years(ts),
        |    months(ts),
        |    days(ts),
        |    hours(ts),
        |    foo(a, "bar", 34))
      """.stripMargin

    val replaceSql =
      """
        |REPLACE TABLE my_tab (a INT, b STRING, ts TIMESTAMP) USING parquet
        |PARTITIONED BY (
        |    a,
        |    bucket(16, b),
        |    years(ts),
        |    months(ts),
        |    days(ts),
        |    hours(ts),
        |    foo(a, "bar", 34))
      """.stripMargin
    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType()
        .add("a", IntegerType)
        .add("b", StringType)
        .add("ts", TimestampType)),
      Seq(
        IdentityTransform(FieldReference("a")),
        BucketTransform(LiteralValue(16, IntegerType), Seq(FieldReference("b"))),
        YearsTransform(FieldReference("ts")),
        MonthsTransform(FieldReference("ts")),
        DaysTransform(FieldReference("ts")),
        HoursTransform(FieldReference("ts")),
        ApplyTransform("foo", Seq(
          FieldReference("a"),
          LiteralValue(UTF8String.fromString("bar"), StringType),
          LiteralValue(34, IntegerType)))),
      None,
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - with bucket") {
    val createSql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet " +
      "CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS"

    val replaceSql = "REPLACE TABLE my_tab(a INT, b STRING) USING parquet " +
      "CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS"

    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType().add("a", IntegerType).add("b", StringType)),
      Seq.empty[Transform],
      Some(BucketSpec(5, Seq("a"), Seq("b"))),
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - with comment") {
    val createSql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet COMMENT 'abc'"
    val replaceSql = "REPLACE TABLE my_tab(a INT, b STRING) USING parquet COMMENT 'abc'"
    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType().add("a", IntegerType).add("b", StringType)),
      Seq.empty[Transform],
      None,
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      Some("abc"))
    Seq(createSql, replaceSql).foreach{ sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - with table properties") {
    val createSql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet" +
      " TBLPROPERTIES('test' = 'test')"
    val replaceSql = "REPLACE TABLE my_tab(a INT, b STRING) USING parquet" +
      " TBLPROPERTIES('test' = 'test')"
    val expectedTableSpec = TableSpec(
      Seq("my_tab"),
      Some(new StructType().add("a", IntegerType).add("b", StringType)),
      Seq.empty[Transform],
      None,
      Map("test" -> "test"),
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - with location") {
    val createSql = "CREATE TABLE my_tab(a INT, b STRING) USING parquet LOCATION '/tmp/file'"
    val replaceSql = "REPLACE TABLE my_tab(a INT, b STRING) USING parquet LOCATION '/tmp/file'"
    val expectedTableSpec = TableSpec(
        Seq("my_tab"),
        Some(new StructType().add("a", IntegerType).add("b", StringType)),
        Seq.empty[Transform],
        None,
        Map.empty[String, String],
        Some("parquet"),
        Map.empty[String, String],
        Some("/tmp/file"),
        None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("create/replace table - byte length literal table name") {
    val createSql = "CREATE TABLE 1m.2g(a INT) USING parquet"
    val replaceSql = "REPLACE TABLE 1m.2g(a INT) USING parquet"
    val expectedTableSpec = TableSpec(
      Seq("1m", "2g"),
      Some(new StructType().add("a", IntegerType)),
      Seq.empty[Transform],
      None,
      Map.empty[String, String],
      Some("parquet"),
      Map.empty[String, String],
      None,
      None)
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
    }
  }

  test("Duplicate clauses - create/replace table") {
    def createTableHeader(duplicateClause: String): String = {
      s"CREATE TABLE my_tab(a INT, b STRING) USING parquet $duplicateClause $duplicateClause"
    }

    def replaceTableHeader(duplicateClause: String): String = {
      s"CREATE TABLE my_tab(a INT, b STRING) USING parquet $duplicateClause $duplicateClause"
    }

    intercept(createTableHeader("TBLPROPERTIES('test' = 'test2')"),
      "Found duplicate clauses: TBLPROPERTIES")
    intercept(createTableHeader("LOCATION '/tmp/file'"),
      "Found duplicate clauses: LOCATION")
    intercept(createTableHeader("COMMENT 'a table'"),
      "Found duplicate clauses: COMMENT")
    intercept(createTableHeader("CLUSTERED BY(b) INTO 256 BUCKETS"),
      "Found duplicate clauses: CLUSTERED BY")
    intercept(createTableHeader("PARTITIONED BY (b)"),
      "Found duplicate clauses: PARTITIONED BY")

    intercept(replaceTableHeader("TBLPROPERTIES('test' = 'test2')"),
      "Found duplicate clauses: TBLPROPERTIES")
    intercept(replaceTableHeader("LOCATION '/tmp/file'"),
      "Found duplicate clauses: LOCATION")
    intercept(replaceTableHeader("COMMENT 'a table'"),
      "Found duplicate clauses: COMMENT")
    intercept(replaceTableHeader("CLUSTERED BY(b) INTO 256 BUCKETS"),
      "Found duplicate clauses: CLUSTERED BY")
    intercept(replaceTableHeader("PARTITIONED BY (b)"),
      "Found duplicate clauses: PARTITIONED BY")
  }

  test("support for other types in OPTIONS") {
    val createSql =
      """
        |CREATE TABLE table_name USING json
        |OPTIONS (a 1, b 0.1, c TRUE)
      """.stripMargin
    val replaceSql =
      """
        |REPLACE TABLE table_name USING json
        |OPTIONS (a 1, b 0.1, c TRUE)
      """.stripMargin
    Seq(createSql, replaceSql).foreach { sql =>
      testCreateOrReplaceDdl(
        sql,
        TableSpec(
          Seq("table_name"),
          Some(new StructType),
          Seq.empty[Transform],
          Option.empty[BucketSpec],
          Map.empty[String, String],
          Some("json"),
          Map("a" -> "1", "b" -> "0.1", "c" -> "true"),
          None,
          None),
        expectedIfNotExists = false)
    }
  }

  test("Test CTAS against native tables") {
    val s1 =
      """
        |CREATE TABLE IF NOT EXISTS mydb.page_view
        |USING parquet
        |COMMENT 'This is the staging page view table'
        |LOCATION '/user/external/page_view'
        |TBLPROPERTIES ('p1'='v1', 'p2'='v2')
        |AS SELECT * FROM src
      """.stripMargin

    val s2 =
      """
        |CREATE TABLE IF NOT EXISTS mydb.page_view
        |USING parquet
        |LOCATION '/user/external/page_view'
        |COMMENT 'This is the staging page view table'
        |TBLPROPERTIES ('p1'='v1', 'p2'='v2')
        |AS SELECT * FROM src
      """.stripMargin

    val s3 =
      """
        |CREATE TABLE IF NOT EXISTS mydb.page_view
        |USING parquet
        |COMMENT 'This is the staging page view table'
        |LOCATION '/user/external/page_view'
        |TBLPROPERTIES ('p1'='v1', 'p2'='v2')
        |AS SELECT * FROM src
      """.stripMargin

    val s4 =
      """
        |REPLACE TABLE mydb.page_view
        |USING parquet
        |COMMENT 'This is the staging page view table'
        |LOCATION '/user/external/page_view'
        |TBLPROPERTIES ('p1'='v1', 'p2'='v2')
        |AS SELECT * FROM src
      """.stripMargin

    val expectedTableSpec = TableSpec(
      Seq("mydb", "page_view"),
      None,
      Seq.empty[Transform],
      None,
      Map("p1" -> "v1", "p2" -> "v2"),
      Some("parquet"),
      Map.empty[String, String],
      Some("/user/external/page_view"),
      Some("This is the staging page view table"))
    Seq(s1, s2, s3, s4).foreach { sql =>
      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = true)
    }
  }

  test("drop table") {
    parseCompare("DROP TABLE testcat.ns1.ns2.tbl",
      DropTable(
        UnresolvedTableOrView(Seq("testcat", "ns1", "ns2", "tbl")),
        ifExists = false,
        purge = false))
    parseCompare(s"DROP TABLE db.tab",
      DropTable(
        UnresolvedTableOrView(Seq("db", "tab")), ifExists = false, purge = false))
    parseCompare(s"DROP TABLE IF EXISTS db.tab",
      DropTable(
        UnresolvedTableOrView(Seq("db", "tab")), ifExists = true, purge = false))
    parseCompare(s"DROP TABLE tab",
      DropTable(
        UnresolvedTableOrView(Seq("tab")), ifExists = false, purge = false))
    parseCompare(s"DROP TABLE IF EXISTS tab",
      DropTable(
        UnresolvedTableOrView(Seq("tab")), ifExists = true, purge = false))
    parseCompare(s"DROP TABLE tab PURGE",
      DropTable(
        UnresolvedTableOrView(Seq("tab")), ifExists = false, purge = true))
    parseCompare(s"DROP TABLE IF EXISTS tab PURGE",
      DropTable(
        UnresolvedTableOrView(Seq("tab")), ifExists = true, purge = true))
  }

  test("drop view") {
    parseCompare(s"DROP VIEW testcat.db.view",
      DropViewStatement(Seq("testcat", "db", "view"), ifExists = false))
    parseCompare(s"DROP VIEW db.view", DropViewStatement(Seq("db", "view"), ifExists = false))
    parseCompare(s"DROP VIEW IF EXISTS db.view",
      DropViewStatement(Seq("db", "view"), ifExists = true))
    parseCompare(s"DROP VIEW view", DropViewStatement(Seq("view"), ifExists = false))
    parseCompare(s"DROP VIEW IF EXISTS view", DropViewStatement(Seq("view"), ifExists = true))
  }

  private def testCreateOrReplaceDdl(
      sqlStatement: String,
      tableSpec: TableSpec,
      expectedIfNotExists: Boolean): Unit = {
    val parsedPlan = parsePlan(sqlStatement)
    val newTableToken = sqlStatement.split(" ")(0).trim.toUpperCase(Locale.ROOT)
    parsedPlan match {
      case create: CreateTableStatement if newTableToken == "CREATE" =>
        assert(create.ifNotExists == expectedIfNotExists)
      case ctas: CreateTableAsSelectStatement if newTableToken == "CREATE" =>
        assert(ctas.ifNotExists == expectedIfNotExists)
      case replace: ReplaceTableStatement if newTableToken == "REPLACE" =>
      case replace: ReplaceTableAsSelectStatement if newTableToken == "REPLACE" =>
      case other =>
        fail("First token in statement does not match the expected parsed plan; CREATE TABLE" +
          " should create a CreateTableStatement, and REPLACE TABLE should create a" +
          s" ReplaceTableStatement. Statement: $sqlStatement, plan type:" +
          s" ${parsedPlan.getClass.getName}.")
    }
    assert(TableSpec(parsedPlan) === tableSpec)
  }

  // ALTER VIEW view_name SET TBLPROPERTIES ('comment' = new_comment);
  // ALTER VIEW view_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
  test("alter view: alter view properties") {
    val sql1_view = "ALTER VIEW table_name SET TBLPROPERTIES ('test' = 'test', " +
        "'comment' = 'new_comment')"
    val sql2_view = "ALTER VIEW table_name UNSET TBLPROPERTIES ('comment', 'test')"
    val sql3_view = "ALTER VIEW table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')"

    comparePlans(parsePlan(sql1_view),
      AlterViewSetPropertiesStatement(
        Seq("table_name"), Map("test" -> "test", "comment" -> "new_comment")))
    comparePlans(parsePlan(sql2_view),
      AlterViewUnsetPropertiesStatement(
        Seq("table_name"), Seq("comment", "test"), ifExists = false))
    comparePlans(parsePlan(sql3_view),
      AlterViewUnsetPropertiesStatement(
        Seq("table_name"), Seq("comment", "test"), ifExists = true))
  }

  // ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
  // ALTER TABLE table_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
  test("alter table: alter table properties") {
    val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
        "'comment' = 'new_comment')"
    val sql2_table = "ALTER TABLE table_name UNSET TBLPROPERTIES ('comment', 'test')"
    val sql3_table = "ALTER TABLE table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')"

    comparePlans(
      parsePlan(sql1_table),
      AlterTableSetPropertiesStatement(
        Seq("table_name"), Map("test" -> "test", "comment" -> "new_comment")))
    comparePlans(
      parsePlan(sql2_table),
      AlterTableUnsetPropertiesStatement(
        Seq("table_name"), Seq("comment", "test"), ifExists = false))
    comparePlans(
      parsePlan(sql3_table),
      AlterTableUnsetPropertiesStatement(
        Seq("table_name"), Seq("comment", "test"), ifExists = true))
  }

  test("alter table: add column") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x int"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, None)
      )))
  }

  test("alter table: add multiple columns") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMNS x int, y string"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, None),
        QualifiedColType(Seq("y"), StringType, true, None, None)
      )))
  }

  test("alter table: add column with COLUMNS") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMNS x int"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, None)
      )))
  }

  test("alter table: add column with COLUMNS (...)") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMNS (x int)"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, None)
      )))
  }

  test("alter table: add column with COLUMNS (...) and COMMENT") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMNS (x int COMMENT 'doc')"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, Some("doc"), None)
      )))
  }

  test("alter table: add non-nullable column") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x int NOT NULL"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, false, None, None)
      )))
  }

  test("alter table: add column with COMMENT") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x int COMMENT 'doc'"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, Some("doc"), None)
      )))
  }

  test("alter table: add column with position") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x int FIRST"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, Some(first()))
      )))

    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x int AFTER y"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x"), IntegerType, true, None, Some(after("y")))
      )))
  }

  test("alter table: add column with nested column name") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x.y.z int COMMENT 'doc'"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x", "y", "z"), IntegerType, true, Some("doc"), None)
      )))
  }

  test("alter table: add multiple columns with nested column name") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ADD COLUMN x.y.z int COMMENT 'doc', a.b string FIRST"),
      AlterTableAddColumnsStatement(Seq("table_name"), Seq(
        QualifiedColType(Seq("x", "y", "z"), IntegerType, true, Some("doc"), None),
        QualifiedColType(Seq("a", "b"), StringType, true, None, Some(first()))
      )))
  }

  test("alter table: set location") {
    comparePlans(
      parsePlan("ALTER TABLE a.b.c SET LOCATION 'new location'"),
      AlterTableSetLocationStatement(Seq("a", "b", "c"), None, "new location"))

    comparePlans(
      parsePlan("ALTER TABLE a.b.c PARTITION(ds='2017-06-10') SET LOCATION 'new location'"),
      AlterTableSetLocationStatement(
        Seq("a", "b", "c"),
        Some(Map("ds" -> "2017-06-10")),
        "new location"))
  }

  test("alter table: rename column") {
    comparePlans(
      parsePlan("ALTER TABLE table_name RENAME COLUMN a.b.c TO d"),
      AlterTableRenameColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), "d"))
  }

  test("alter table: update column type using ALTER") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c TYPE bigint"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), Some(LongType), None, None, None))
  }

  test("alter table: update column type invalid type") {
    val msg = intercept[ParseException] {
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c TYPE bad_type")
    }.getMessage
    assert(msg.contains("DataType bad_type is not supported"))
  }

  test("alter table: update column type") {
    comparePlans(
      parsePlan("ALTER TABLE table_name CHANGE COLUMN a.b.c TYPE bigint"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), Some(LongType), None, None, None))
  }

  test("alter table: update column comment") {
    comparePlans(
      parsePlan("ALTER TABLE table_name CHANGE COLUMN a.b.c COMMENT 'new comment'"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), None, None, Some("new comment"), None))
  }

  test("alter table: update column position") {
    comparePlans(
      parsePlan("ALTER TABLE table_name CHANGE COLUMN a.b.c FIRST"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), None, None, None, Some(first())))
  }

  test("alter table: mutiple property changes are not allowed") {
    intercept[ParseException] {
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c " +
        "TYPE bigint COMMENT 'new comment'")}

    intercept[ParseException] {
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c " +
        "TYPE bigint COMMENT AFTER d")}

    intercept[ParseException] {
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c " +
        "TYPE bigint COMMENT 'new comment' AFTER d")}
  }

  test("alter table: SET/DROP NOT NULL") {
    comparePlans(
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c SET NOT NULL"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), None, Some(false), None, None))

    comparePlans(
      parsePlan("ALTER TABLE table_name ALTER COLUMN a.b.c DROP NOT NULL"),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), None, Some(true), None, None))
  }

  test("alter table: drop column") {
    comparePlans(
      parsePlan("ALTER TABLE table_name DROP COLUMN a.b.c"),
      AlterTableDropColumnsStatement(Seq("table_name"), Seq(Seq("a", "b", "c"))))
  }

  test("alter table: drop multiple columns") {
    val sql = "ALTER TABLE table_name DROP COLUMN x, y, a.b.c"
    Seq(sql, sql.replace("COLUMN", "COLUMNS")).foreach { drop =>
      comparePlans(
        parsePlan(drop),
        AlterTableDropColumnsStatement(
          Seq("table_name"),
          Seq(Seq("x"), Seq("y"), Seq("a", "b", "c"))))
    }
  }

  test("alter table: hive style change column") {
    val sql1 = "ALTER TABLE table_name CHANGE COLUMN a.b.c c INT"
    val sql2 = "ALTER TABLE table_name CHANGE COLUMN a.b.c c INT COMMENT 'new_comment'"
    val sql3 = "ALTER TABLE table_name CHANGE COLUMN a.b.c c INT AFTER other_col"

    comparePlans(
      parsePlan(sql1),
      AlterTableAlterColumnStatement(
        Seq("table_name"), Seq("a", "b", "c"), Some(IntegerType), None, None, None))

    comparePlans(
      parsePlan(sql2),
      AlterTableAlterColumnStatement(
        Seq("table_name"),
        Seq("a", "b", "c"),
        Some(IntegerType),
        None,
        Some("new_comment"),
        None))

    comparePlans(
      parsePlan(sql3),
      AlterTableAlterColumnStatement(
        Seq("table_name"),
        Seq("a", "b", "c"),
        Some(IntegerType),
        None,
        None,
        Some(after("other_col"))))

    // renaming column not supported in hive style ALTER COLUMN.
    intercept("ALTER TABLE table_name CHANGE COLUMN a.b.c new_name INT",
      "please run RENAME COLUMN instead")

    // ALTER COLUMN for a partition is not supported.
    intercept("ALTER TABLE table_name PARTITION (a='1') CHANGE COLUMN a.b.c c INT")
  }

  test("alter table: hive style replace columns") {
    val sql1 = "ALTER TABLE table_name REPLACE COLUMNS (x string)"
    val sql2 = "ALTER TABLE table_name REPLACE COLUMNS (x string COMMENT 'x1')"
    val sql3 = "ALTER TABLE table_name REPLACE COLUMNS (x string COMMENT 'x1', y int)"
    val sql4 = "ALTER TABLE table_name REPLACE COLUMNS (x string COMMENT 'x1', y int COMMENT 'y1')"

    comparePlans(
      parsePlan(sql1),
      AlterTableReplaceColumnsStatement(
        Seq("table_name"),
        Seq(QualifiedColType(Seq("x"), StringType, true, None, None))))

    comparePlans(
      parsePlan(sql2),
      AlterTableReplaceColumnsStatement(
        Seq("table_name"),
        Seq(QualifiedColType(Seq("x"), StringType, true, Some("x1"), None))))

    comparePlans(
      parsePlan(sql3),
      AlterTableReplaceColumnsStatement(
        Seq("table_name"),
        Seq(
          QualifiedColType(Seq("x"), StringType, true, Some("x1"), None),
          QualifiedColType(Seq("y"), IntegerType, true, None, None)
        )))

    comparePlans(
      parsePlan(sql4),
      AlterTableReplaceColumnsStatement(
        Seq("table_name"),
        Seq(
          QualifiedColType(Seq("x"), StringType, true, Some("x1"), None),
          QualifiedColType(Seq("y"), IntegerType, true, Some("y1"), None)
        )))

    intercept("ALTER TABLE table_name PARTITION (a='1') REPLACE COLUMNS (x string)",
      "Operation not allowed: ALTER TABLE table PARTITION partition_spec REPLACE COLUMNS")

    intercept("ALTER TABLE table_name REPLACE COLUMNS (x string NOT NULL)",
      "NOT NULL is not supported in Hive-style REPLACE COLUMNS")

    intercept("ALTER TABLE table_name REPLACE COLUMNS (x string FIRST)",
      "Column position is not supported in Hive-style REPLACE COLUMNS")
  }

  test("alter table/view: rename table/view") {
    comparePlans(
      parsePlan("ALTER TABLE a.b.c RENAME TO x.y.z"),
      RenameTableStatement(Seq("a", "b", "c"), Seq("x", "y", "z"), isView = false))
    comparePlans(
      parsePlan("ALTER VIEW a.b.c RENAME TO x.y.z"),
      RenameTableStatement(Seq("a", "b", "c"), Seq("x", "y", "z"), isView = true))
  }

  test("describe table column") {
    comparePlans(parsePlan("DESCRIBE t col"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("col"), isExtended = false))
    comparePlans(parsePlan("DESCRIBE t `abc.xyz`"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("abc.xyz"), isExtended = false))
    comparePlans(parsePlan("DESCRIBE t abc.xyz"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("abc", "xyz"), isExtended = false))
    comparePlans(parsePlan("DESCRIBE t `a.b`.`x.y`"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("a.b", "x.y"), isExtended = false))

    comparePlans(parsePlan("DESCRIBE TABLE t col"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("col"), isExtended = false))
    comparePlans(parsePlan("DESCRIBE TABLE EXTENDED t col"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("col"), isExtended = true))
    comparePlans(parsePlan("DESCRIBE TABLE FORMATTED t col"),
      DescribeColumn(
        UnresolvedTableOrView(Seq("t")), Seq("col"), isExtended = true))

    val caught = intercept[AnalysisException](
      parsePlan("DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"))
    assert(caught.getMessage.contains(
        "DESC TABLE COLUMN for a specific partition is not supported"))
  }

  test("describe database") {
    val sql1 = "DESCRIBE DATABASE EXTENDED a.b"
    val sql2 = "DESCRIBE DATABASE a.b"
    comparePlans(parsePlan(sql1),
      DescribeNamespace(UnresolvedNamespace(Seq("a", "b")), extended = true))
    comparePlans(parsePlan(sql2),
      DescribeNamespace(UnresolvedNamespace(Seq("a", "b")), extended = false))
  }

  test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
    comparePlans(parsePlan("describe t"),
      DescribeRelation(UnresolvedTableOrView(Seq("t")), Map.empty, isExtended = false))
    comparePlans(parsePlan("describe table t"),
      DescribeRelation(UnresolvedTableOrView(Seq("t")), Map.empty, isExtended = false))
    comparePlans(parsePlan("describe table extended t"),
      DescribeRelation(UnresolvedTableOrView(Seq("t")), Map.empty, isExtended = true))
    comparePlans(parsePlan("describe table formatted t"),
      DescribeRelation(UnresolvedTableOrView(Seq("t")), Map.empty, isExtended = true))
  }

  test("insert table: basic append") {
    Seq(
      "INSERT INTO TABLE testcat.ns1.ns2.tbl SELECT * FROM source",
      "INSERT INTO testcat.ns1.ns2.tbl SELECT * FROM source"
    ).foreach { sql =>
      parseCompare(sql,
        InsertIntoStatement(
          UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
          Map.empty,
          Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("source"))),
          overwrite = false, ifPartitionNotExists = false))
    }
  }

  test("insert table: append from another catalog") {
    parseCompare("INSERT INTO TABLE testcat.ns1.ns2.tbl SELECT * FROM testcat2.db.tbl",
      InsertIntoStatement(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        Map.empty,
        Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("testcat2", "db", "tbl"))),
        overwrite = false, ifPartitionNotExists = false))
  }

  test("insert table: append with partition") {
    parseCompare(
      """
        |INSERT INTO testcat.ns1.ns2.tbl
        |PARTITION (p1 = 3, p2)
        |SELECT * FROM source
      """.stripMargin,
      InsertIntoStatement(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        Map("p1" -> Some("3"), "p2" -> None),
        Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("source"))),
        overwrite = false, ifPartitionNotExists = false))
  }

  test("insert table: overwrite") {
    Seq(
      "INSERT OVERWRITE TABLE testcat.ns1.ns2.tbl SELECT * FROM source",
      "INSERT OVERWRITE testcat.ns1.ns2.tbl SELECT * FROM source"
    ).foreach { sql =>
      parseCompare(sql,
        InsertIntoStatement(
          UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
          Map.empty,
          Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("source"))),
          overwrite = true, ifPartitionNotExists = false))
    }
  }

  test("insert table: overwrite with partition") {
    parseCompare(
      """
        |INSERT OVERWRITE TABLE testcat.ns1.ns2.tbl
        |PARTITION (p1 = 3, p2)
        |SELECT * FROM source
      """.stripMargin,
      InsertIntoStatement(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        Map("p1" -> Some("3"), "p2" -> None),
        Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("source"))),
        overwrite = true, ifPartitionNotExists = false))
  }

  test("insert table: overwrite with partition if not exists") {
    parseCompare(
      """
        |INSERT OVERWRITE TABLE testcat.ns1.ns2.tbl
        |PARTITION (p1 = 3) IF NOT EXISTS
        |SELECT * FROM source
      """.stripMargin,
      InsertIntoStatement(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        Map("p1" -> Some("3")),
        Project(Seq(UnresolvedStar(None)), UnresolvedRelation(Seq("source"))),
        overwrite = true, ifPartitionNotExists = true))
  }

  test("insert table: if not exists with dynamic partition fails") {
    val exc = intercept[AnalysisException] {
      parsePlan(
        """
          |INSERT OVERWRITE TABLE testcat.ns1.ns2.tbl
          |PARTITION (p1 = 3, p2) IF NOT EXISTS
          |SELECT * FROM source
        """.stripMargin)
    }

    assert(exc.getMessage.contains("IF NOT EXISTS with dynamic partitions"))
    assert(exc.getMessage.contains("p2"))
  }

  test("insert table: if not exists without overwrite fails") {
    val exc = intercept[AnalysisException] {
      parsePlan(
        """
          |INSERT INTO TABLE testcat.ns1.ns2.tbl
          |PARTITION (p1 = 3) IF NOT EXISTS
          |SELECT * FROM source
        """.stripMargin)
    }

    assert(exc.getMessage.contains("INSERT INTO ... IF NOT EXISTS"))
  }

  test("delete from table: delete all") {
    parseCompare("DELETE FROM testcat.ns1.ns2.tbl",
      DeleteFromTable(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        None))
  }

  test("delete from table: with alias and where clause") {
    parseCompare("DELETE FROM testcat.ns1.ns2.tbl AS t WHERE t.a = 2",
      DeleteFromTable(
        SubqueryAlias("t", UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl"))),
        Some(EqualTo(UnresolvedAttribute("t.a"), Literal(2)))))
  }

  test("delete from table: columns aliases is not allowed") {
    val exc = intercept[ParseException] {
      parsePlan("DELETE FROM testcat.ns1.ns2.tbl AS t(a,b,c,d) WHERE d = 2")
    }

    assert(exc.getMessage.contains("Columns aliases are not allowed in DELETE."))
  }

  test("update table: basic") {
    parseCompare(
      """
        |UPDATE testcat.ns1.ns2.tbl
        |SET a='Robert', b=32
      """.stripMargin,
      UpdateTable(
        UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
        Seq(Assignment(UnresolvedAttribute("a"), Literal("Robert")),
          Assignment(UnresolvedAttribute("b"), Literal(32))),
        None))
  }

  test("update table: with alias and where clause") {
    parseCompare(
      """
        |UPDATE testcat.ns1.ns2.tbl AS t
        |SET t.a='Robert', t.b=32
        |WHERE t.c=2
      """.stripMargin,
      UpdateTable(
        SubqueryAlias("t", UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl"))),
        Seq(Assignment(UnresolvedAttribute("t.a"), Literal("Robert")),
          Assignment(UnresolvedAttribute("t.b"), Literal(32))),
        Some(EqualTo(UnresolvedAttribute("t.c"), Literal(2)))))
  }

  test("update table: columns aliases is not allowed") {
    val exc = intercept[ParseException] {
      parsePlan(
        """
          |UPDATE testcat.ns1.ns2.tbl AS t(a,b,c,d)
          |SET b='Robert', c=32
          |WHERE d=2
        """.stripMargin)
    }

    assert(exc.getMessage.contains("Columns aliases are not allowed in UPDATE."))
  }

  test("merge into table: basic") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING testcat2.ns1.ns2.tbl AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED AND (target.col2='delete') THEN DELETE
        |WHEN MATCHED AND (target.col2='update') THEN UPDATE SET target.col2 = source.col2
        |WHEN NOT MATCHED AND (target.col2='insert')
        |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl"))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(DeleteAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("delete")))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update"))),
            Seq(Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2"))))),
        Seq(InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert"))),
          Seq(Assignment(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
            Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2")))))))
  }

  test("merge into table: using subquery") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING (SELECT * FROM testcat2.ns1.ns2.tbl) AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED AND (target.col2='delete') THEN DELETE
        |WHEN MATCHED AND (target.col2='update') THEN UPDATE SET target.col2 = source.col2
        |WHEN NOT MATCHED AND (target.col2='insert')
        |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", Project(Seq(UnresolvedStar(None)),
          UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl")))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(DeleteAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("delete")))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update"))),
            Seq(Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2"))))),
        Seq(InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert"))),
          Seq(Assignment(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
            Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2")))))))
  }

  test("merge into table: cte") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING (WITH s as (SELECT * FROM testcat2.ns1.ns2.tbl) SELECT * FROM s) AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED AND (target.col2='delete') THEN DELETE
        |WHEN MATCHED AND (target.col2='update') THEN UPDATE SET target.col2 = source.col2
        |WHEN NOT MATCHED AND (target.col2='insert')
        |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", With(Project(Seq(UnresolvedStar(None)),
          UnresolvedRelation(Seq("s"))),
          Seq("s" -> SubqueryAlias("s", Project(Seq(UnresolvedStar(None)),
            UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl"))))))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(DeleteAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("delete")))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update"))),
            Seq(Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2"))))),
        Seq(InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert"))),
          Seq(Assignment(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
            Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2")))))))
  }

  test("merge into table: no additional condition") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING testcat2.ns1.ns2.tbl AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED THEN UPDATE SET target.col2 = source.col2
        |WHEN NOT MATCHED
        |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl"))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(UpdateAction(None,
          Seq(Assignment(UnresolvedAttribute("target.col2"),
            UnresolvedAttribute("source.col2"))))),
        Seq(InsertAction(None,
          Seq(Assignment(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
            Assignment(UnresolvedAttribute("target.col2"),
              UnresolvedAttribute("source.col2")))))))
  }

  test("merge into table: star") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING testcat2.ns1.ns2.tbl AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED AND (target.col2='delete') THEN DELETE
        |WHEN MATCHED AND (target.col2='update') THEN UPDATE SET *
        |WHEN NOT MATCHED AND (target.col2='insert')
        |THEN INSERT *
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl"))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(DeleteAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("delete")))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update"))),
            Seq())),
        Seq(InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert"))),
          Seq()))))
  }

  test("merge into table: columns aliases are not allowed") {
    Seq("target(c1, c2)" -> "source", "target" -> "source(c1, c2)").foreach {
      case (targetAlias, sourceAlias) =>
        val exc = intercept[ParseException] {
          parsePlan(
            s"""
               |MERGE INTO testcat1.ns1.ns2.tbl AS $targetAlias
               |USING testcat2.ns1.ns2.tbl AS $sourceAlias
               |ON target.col1 = source.col1
               |WHEN MATCHED AND (target.col2='delete') THEN DELETE
               |WHEN MATCHED AND (target.col2='update') THEN UPDATE SET target.col2 = source.col2
               |WHEN NOT MATCHED AND (target.col2='insert')
               |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
             """.stripMargin)
        }

        assert(exc.getMessage.contains("Columns aliases are not allowed in MERGE."))
    }
  }

  test("merge into table: multi matched and not matched clauses") {
    parseCompare(
      """
        |MERGE INTO testcat1.ns1.ns2.tbl AS target
        |USING testcat2.ns1.ns2.tbl AS source
        |ON target.col1 = source.col1
        |WHEN MATCHED AND (target.col2='delete') THEN DELETE
        |WHEN MATCHED AND (target.col2='update1') THEN UPDATE SET target.col2 = 1
        |WHEN MATCHED AND (target.col2='update2') THEN UPDATE SET target.col2 = 2
        |WHEN NOT MATCHED AND (target.col2='insert1')
        |THEN INSERT (target.col1, target.col2) values (source.col1, 1)
        |WHEN NOT MATCHED AND (target.col2='insert2')
        |THEN INSERT (target.col1, target.col2) values (source.col1, 2)
      """.stripMargin,
      MergeIntoTable(
        SubqueryAlias("target", UnresolvedRelation(Seq("testcat1", "ns1", "ns2", "tbl"))),
        SubqueryAlias("source", UnresolvedRelation(Seq("testcat2", "ns1", "ns2", "tbl"))),
        EqualTo(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
        Seq(DeleteAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("delete")))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update1"))),
            Seq(Assignment(UnresolvedAttribute("target.col2"), Literal(1)))),
          UpdateAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("update2"))),
            Seq(Assignment(UnresolvedAttribute("target.col2"), Literal(2))))),
        Seq(InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert1"))),
          Seq(Assignment(UnresolvedAttribute("target.col1"), UnresolvedAttribute("source.col1")),
            Assignment(UnresolvedAttribute("target.col2"), Literal(1)))),
          InsertAction(Some(EqualTo(UnresolvedAttribute("target.col2"), Literal("insert2"))),
            Seq(Assignment(UnresolvedAttribute("target.col1"),
              UnresolvedAttribute("source.col1")),
              Assignment(UnresolvedAttribute("target.col2"), Literal(2)))))))
  }

  test("merge into table: only the last matched clause can omit the condition") {
    val exc = intercept[ParseException] {
      parsePlan(
        """
          |MERGE INTO testcat1.ns1.ns2.tbl AS target
          |USING testcat2.ns1.ns2.tbl AS source
          |ON target.col1 = source.col1
          |WHEN MATCHED AND (target.col2 == 'update1') THEN UPDATE SET target.col2 = 1
          |WHEN MATCHED THEN UPDATE SET target.col2 = 2
          |WHEN MATCHED THEN DELETE
          |WHEN NOT MATCHED AND (target.col2='insert')
          |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2)
        """.stripMargin)
    }

    assert(exc.getMessage.contains("only the last MATCHED clause can omit the condition"))
  }

  test("merge into table: only the last not matched clause can omit the condition") {
    val exc =
intercept[ParseException] { parsePlan( """ |MERGE INTO testcat1.ns1.ns2.tbl AS target |USING testcat2.ns1.ns2.tbl AS source |ON target.col1 = source.col1 |WHEN MATCHED AND (target.col2 == 'update') THEN UPDATE SET target.col2 = source.col2 |WHEN MATCHED THEN DELETE |WHEN NOT MATCHED AND (target.col2='insert1') |THEN INSERT (target.col1, target.col2) values (source.col1, 1) |WHEN NOT MATCHED |THEN INSERT (target.col1, target.col2) values (source.col1, 2) |WHEN NOT MATCHED |THEN INSERT (target.col1, target.col2) values (source.col1, source.col2) """.stripMargin) } assert(exc.getMessage.contains("only the last NOT MATCHED clause can omit the condition")) } test("merge into table: there must be a when (not) matched condition") { val exc = intercept[ParseException] { parsePlan( """ |MERGE INTO testcat1.ns1.ns2.tbl AS target |USING testcat2.ns1.ns2.tbl AS source |ON target.col1 = source.col1 """.stripMargin) } assert(exc.getMessage.contains("There must be at least one WHEN clause in a MERGE statement")) } test("show views") { comparePlans( parsePlan("SHOW VIEWS"), ShowViews(UnresolvedNamespace(Seq.empty[String]), None)) comparePlans( parsePlan("SHOW VIEWS '*test*'"), ShowViews(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) comparePlans( parsePlan("SHOW VIEWS LIKE '*test*'"), ShowViews(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) comparePlans( parsePlan("SHOW VIEWS FROM testcat.ns1.ns2.tbl"), ShowViews(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), None)) comparePlans( parsePlan("SHOW VIEWS IN testcat.ns1.ns2.tbl"), ShowViews(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), None)) comparePlans( parsePlan("SHOW VIEWS IN ns1 '*test*'"), ShowViews(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) comparePlans( parsePlan("SHOW VIEWS IN ns1 LIKE '*test*'"), ShowViews(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) } test("create namespace -- backward compatibility with DATABASE/DBPROPERTIES") { val expected = CreateNamespaceStatement( Seq("a", "b", "c"), ifNotExists = true, Map( "a" -> "a", "b" -> "b", "c" -> "c", "comment" -> "namespace_comment", "location" -> "/home/user/db")) comparePlans( parsePlan( """ |CREATE NAMESPACE IF NOT EXISTS a.b.c |WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c') |COMMENT 'namespace_comment' LOCATION '/home/user/db' """.stripMargin), expected) comparePlans( parsePlan( """ |CREATE DATABASE IF NOT EXISTS a.b.c |WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c') |COMMENT 'namespace_comment' LOCATION '/home/user/db' """.stripMargin), expected) } test("create namespace -- check duplicates") { def createDatabase(duplicateClause: String): String = { s""" |CREATE NAMESPACE IF NOT EXISTS a.b.c |$duplicateClause |$duplicateClause """.stripMargin } val sql1 = createDatabase("COMMENT 'namespace_comment'") val sql2 = createDatabase("LOCATION '/home/user/db'") val sql3 = createDatabase("WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c')") val sql4 = createDatabase("WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')") intercept(sql1, "Found duplicate clauses: COMMENT") intercept(sql2, "Found duplicate clauses: LOCATION") intercept(sql3, "Found duplicate clauses: WITH PROPERTIES") intercept(sql4, "Found duplicate clauses: WITH DBPROPERTIES") } test("create namespace - property values must be set") { assertUnsupported( sql = "CREATE NAMESPACE a.b.c WITH PROPERTIES('key_without_value', 'key_with_value'='x')", containsThesePhrases = Seq("key_without_value")) } test("create namespace -- either PROPERTIES or DBPROPERTIES is allowed") { val sql = s""" |CREATE NAMESPACE 
IF NOT EXISTS a.b.c |WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c') |WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c') """.stripMargin intercept(sql, "Either PROPERTIES or DBPROPERTIES is allowed") } test("create namespace - support for other types in PROPERTIES") { val sql = """ |CREATE NAMESPACE a.b.c |LOCATION '/home/user/db' |WITH PROPERTIES ('a'=1, 'b'=0.1, 'c'=TRUE) """.stripMargin comparePlans( parsePlan(sql), CreateNamespaceStatement( Seq("a", "b", "c"), ifNotExists = false, Map( "a" -> "1", "b" -> "0.1", "c" -> "true", "location" -> "/home/user/db"))) } test("drop namespace") { comparePlans( parsePlan("DROP NAMESPACE a.b.c"), DropNamespace( UnresolvedNamespace(Seq("a", "b", "c")), ifExists = false, cascade = false)) comparePlans( parsePlan("DROP NAMESPACE IF EXISTS a.b.c"), DropNamespace( UnresolvedNamespace(Seq("a", "b", "c")), ifExists = true, cascade = false)) comparePlans( parsePlan("DROP NAMESPACE IF EXISTS a.b.c RESTRICT"), DropNamespace( UnresolvedNamespace(Seq("a", "b", "c")), ifExists = true, cascade = false)) comparePlans( parsePlan("DROP NAMESPACE IF EXISTS a.b.c CASCADE"), DropNamespace( UnresolvedNamespace(Seq("a", "b", "c")), ifExists = true, cascade = true)) comparePlans( parsePlan("DROP NAMESPACE a.b.c CASCADE"), DropNamespace( UnresolvedNamespace(Seq("a", "b", "c")), ifExists = false, cascade = true)) } test("set namespace properties") { comparePlans( parsePlan("ALTER DATABASE a.b.c SET PROPERTIES ('a'='a', 'b'='b', 'c'='c')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("a" -> "a", "b" -> "b", "c" -> "c"))) comparePlans( parsePlan("ALTER SCHEMA a.b.c SET PROPERTIES ('a'='a')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("a" -> "a"))) comparePlans( parsePlan("ALTER NAMESPACE a.b.c SET PROPERTIES ('b'='b')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("b" -> "b"))) comparePlans( parsePlan("ALTER DATABASE a.b.c SET DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("a" -> "a", "b" -> "b", "c" -> "c"))) comparePlans( parsePlan("ALTER SCHEMA a.b.c SET DBPROPERTIES ('a'='a')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("a" -> "a"))) comparePlans( parsePlan("ALTER NAMESPACE a.b.c SET DBPROPERTIES ('b'='b')"), AlterNamespaceSetProperties( UnresolvedNamespace(Seq("a", "b", "c")), Map("b" -> "b"))) } test("set namespace location") { comparePlans( parsePlan("ALTER DATABASE a.b.c SET LOCATION '/home/user/db'"), AlterNamespaceSetLocation( UnresolvedNamespace(Seq("a", "b", "c")), "/home/user/db")) comparePlans( parsePlan("ALTER SCHEMA a.b.c SET LOCATION '/home/user/db'"), AlterNamespaceSetLocation( UnresolvedNamespace(Seq("a", "b", "c")), "/home/user/db")) comparePlans( parsePlan("ALTER NAMESPACE a.b.c SET LOCATION '/home/user/db'"), AlterNamespaceSetLocation( UnresolvedNamespace(Seq("a", "b", "c")), "/home/user/db")) } test("show databases: basic") { comparePlans( parsePlan("SHOW DATABASES"), ShowNamespaces(UnresolvedNamespace(Seq.empty[String]), None)) comparePlans( parsePlan("SHOW DATABASES LIKE 'defau*'"), ShowNamespaces(UnresolvedNamespace(Seq.empty[String]), Some("defau*"))) } test("show databases: FROM/IN operator is not allowed") { def verify(sql: String): Unit = { val exc = intercept[ParseException] { parsePlan(sql) } assert(exc.getMessage.contains("FROM/IN operator is not allowed in SHOW DATABASES")) } verify("SHOW DATABASES FROM testcat.ns1.ns2") verify("SHOW DATABASES IN 
testcat.ns1.ns2") } test("show namespaces") { comparePlans( parsePlan("SHOW NAMESPACES"), ShowNamespaces(UnresolvedNamespace(Seq.empty[String]), None)) comparePlans( parsePlan("SHOW NAMESPACES FROM testcat.ns1.ns2"), ShowNamespaces(UnresolvedNamespace(Seq("testcat", "ns1", "ns2")), None)) comparePlans( parsePlan("SHOW NAMESPACES IN testcat.ns1.ns2"), ShowNamespaces(UnresolvedNamespace(Seq("testcat", "ns1", "ns2")), None)) comparePlans( parsePlan("SHOW NAMESPACES IN testcat.ns1 LIKE '*pattern*'"), ShowNamespaces(UnresolvedNamespace(Seq("testcat", "ns1")), Some("*pattern*"))) } test("analyze table statistics") { comparePlans(parsePlan("analyze table a.b.c compute statistics"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map.empty, noScan = false)) comparePlans(parsePlan("analyze table a.b.c compute statistics noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map.empty, noScan = true)) comparePlans(parsePlan("analyze table a.b.c partition (a) compute statistics nOscAn"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("a" -> None), noScan = true)) // Partitions specified comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = false)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = true)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09') COMPUTE STATISTICS noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> Some("2008-04-09")), noScan = true)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr) COMPUTE STATISTICS"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = false)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr) COMPUTE STATISTICS noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = true)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr=11) COMPUTE STATISTICS noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> None, "hr" -> Some("11")), noScan = true)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr) COMPUTE STATISTICS"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> None, "hr" -> None), noScan = false)) comparePlans( parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr) COMPUTE STATISTICS noscan"), AnalyzeTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), Map("ds" -> None, "hr" -> None), noScan = true)) intercept("analyze table a.b.c compute statistics xxxx", "Expected `NOSCAN` instead of `xxxx`") intercept("analyze table a.b.c partition (a) compute statistics xxxx", "Expected `NOSCAN` instead of `xxxx`") } test("analyze table column statistics") { intercept("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR COLUMNS", "") comparePlans( parsePlan("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR COLUMNS key, value"), AnalyzeColumn( UnresolvedTableOrView(Seq("a", "b", 
"c")), Option(Seq("key", "value")), allColumns = false)) // Partition specified - should be ignored comparePlans( parsePlan( s""" |ANALYZE TABLE a.b.c PARTITION(ds='2017-06-10') |COMPUTE STATISTICS FOR COLUMNS key, value """.stripMargin), AnalyzeColumn( UnresolvedTableOrView(Seq("a", "b", "c")), Option(Seq("key", "value")), allColumns = false)) // Partition specified should be ignored in case of COMPUTE STATISTICS FOR ALL COLUMNS comparePlans( parsePlan( s""" |ANALYZE TABLE a.b.c PARTITION(ds='2017-06-10') |COMPUTE STATISTICS FOR ALL COLUMNS """.stripMargin), AnalyzeColumn( UnresolvedTableOrView(Seq("a", "b", "c")), None, allColumns = true)) intercept("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR ALL COLUMNS key, value", "mismatched input 'key' expecting {<EOF>, ';'}") intercept("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR ALL", "missing 'COLUMNS' at '<EOF>'") } test("MSCK REPAIR TABLE") { comparePlans( parsePlan("MSCK REPAIR TABLE a.b.c"), RepairTableStatement(Seq("a", "b", "c"))) } test("LOAD DATA INTO table") { comparePlans( parsePlan("LOAD DATA INPATH 'filepath' INTO TABLE a.b.c"), LoadData(UnresolvedTable(Seq("a", "b", "c")), "filepath", false, false, None)) comparePlans( parsePlan("LOAD DATA LOCAL INPATH 'filepath' INTO TABLE a.b.c"), LoadData(UnresolvedTable(Seq("a", "b", "c")), "filepath", true, false, None)) comparePlans( parsePlan("LOAD DATA LOCAL INPATH 'filepath' OVERWRITE INTO TABLE a.b.c"), LoadData(UnresolvedTable(Seq("a", "b", "c")), "filepath", true, true, None)) comparePlans( parsePlan( s""" |LOAD DATA LOCAL INPATH 'filepath' OVERWRITE INTO TABLE a.b.c |PARTITION(ds='2017-06-10') """.stripMargin), LoadData( UnresolvedTable(Seq("a", "b", "c")), "filepath", true, true, Some(Map("ds" -> "2017-06-10")))) } test("SHOW CREATE table") { comparePlans( parsePlan("SHOW CREATE TABLE a.b.c"), ShowCreateTable(UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false))) comparePlans( parsePlan("SHOW CREATE TABLE a.b.c AS SERDE"), ShowCreateTable( UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false), asSerde = true)) } test("CACHE TABLE") { comparePlans( parsePlan("CACHE TABLE a.b.c"), CacheTableStatement(Seq("a", "b", "c"), None, false, Map.empty)) comparePlans( parsePlan("CACHE LAZY TABLE a.b.c"), CacheTableStatement(Seq("a", "b", "c"), None, true, Map.empty)) comparePlans( parsePlan("CACHE LAZY TABLE a.b.c OPTIONS('storageLevel' 'DISK_ONLY')"), CacheTableStatement(Seq("a", "b", "c"), None, true, Map("storageLevel" -> "DISK_ONLY"))) intercept("CACHE TABLE a.b.c AS SELECT * FROM testData", "It is not allowed to add catalog/namespace prefix a.b") } test("UNCACHE TABLE") { comparePlans( parsePlan("UNCACHE TABLE a.b.c"), UncacheTableStatement(Seq("a", "b", "c"), ifExists = false)) comparePlans( parsePlan("UNCACHE TABLE IF EXISTS a.b.c"), UncacheTableStatement(Seq("a", "b", "c"), ifExists = true)) } test("TRUNCATE table") { comparePlans( parsePlan("TRUNCATE TABLE a.b.c"), TruncateTableStatement(Seq("a", "b", "c"), None)) comparePlans( parsePlan("TRUNCATE TABLE a.b.c PARTITION(ds='2017-06-10')"), TruncateTableStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10")))) } test("REFRESH TABLE") { comparePlans( parsePlan("REFRESH TABLE a.b.c"), RefreshTable(UnresolvedTableOrView(Seq("a", "b", "c")))) } test("show columns") { val sql1 = "SHOW COLUMNS FROM t1" val sql2 = "SHOW COLUMNS IN db1.t1" val sql3 = "SHOW COLUMNS FROM t1 IN db1" val sql4 = "SHOW COLUMNS FROM db1.t1 IN db1" val parsed1 = parsePlan(sql1) val expected1 = ShowColumnsStatement(Seq("t1"), None) val parsed2 = 
parsePlan(sql2) val expected2 = ShowColumnsStatement(Seq("db1", "t1"), None) val parsed3 = parsePlan(sql3) val expected3 = ShowColumnsStatement(Seq("t1"), Some(Seq("db1"))) val parsed4 = parsePlan(sql4) val expected4 = ShowColumnsStatement(Seq("db1", "t1"), Some(Seq("db1"))) comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) comparePlans(parsed3, expected3) comparePlans(parsed4, expected4) } test("alter table: recover partitions") { comparePlans( parsePlan("ALTER TABLE a.b.c RECOVER PARTITIONS"), AlterTableRecoverPartitionsStatement(Seq("a", "b", "c"))) } test("alter table: add partition") { val sql1 = """ |ALTER TABLE a.b.c ADD IF NOT EXISTS PARTITION |(dt='2008-08-08', country='us') LOCATION 'location1' PARTITION |(dt='2009-09-09', country='uk') """.stripMargin val sql2 = "ALTER TABLE a.b.c ADD PARTITION (dt='2008-08-08') LOCATION 'loc'" val parsed1 = parsePlan(sql1) val parsed2 = parsePlan(sql2) val expected1 = AlterTableAddPartition( UnresolvedTable(Seq("a", "b", "c")), Seq( UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us"), Some("location1")), UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"), None)), ifNotExists = true) val expected2 = AlterTableAddPartition( UnresolvedTable(Seq("a", "b", "c")), Seq(UnresolvedPartitionSpec(Map("dt" -> "2008-08-08"), Some("loc"))), ifNotExists = false) comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) } test("alter view: add partition (not supported)") { assertUnsupported( """ |ALTER VIEW a.b.c ADD IF NOT EXISTS PARTITION |(dt='2008-08-08', country='us') PARTITION |(dt='2009-09-09', country='uk') """.stripMargin) } test("alter table: rename partition") { val sql1 = """ |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') |RENAME TO PARTITION (dt='2008-09-09', country='uk') """.stripMargin val parsed1 = parsePlan(sql1) val expected1 = AlterTableRenamePartitionStatement( Seq("table_name"), Map("dt" -> "2008-08-08", "country" -> "us"), Map("dt" -> "2008-09-09", "country" -> "uk")) comparePlans(parsed1, expected1) val sql2 = """ |ALTER TABLE a.b.c PARTITION (ds='2017-06-10') |RENAME TO PARTITION (ds='2018-06-10') """.stripMargin val parsed2 = parsePlan(sql2) val expected2 = AlterTableRenamePartitionStatement( Seq("a", "b", "c"), Map("ds" -> "2017-06-10"), Map("ds" -> "2018-06-10")) comparePlans(parsed2, expected2) } // ALTER TABLE table_name DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] // ALTER VIEW table_name DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] 
test("alter table: drop partition") { val sql1_table = """ |ALTER TABLE table_name DROP IF EXISTS PARTITION |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') """.stripMargin val sql2_table = """ |ALTER TABLE table_name DROP PARTITION |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') """.stripMargin val sql1_view = sql1_table.replace("TABLE", "VIEW") val sql2_view = sql2_table.replace("TABLE", "VIEW") val parsed1_table = parsePlan(sql1_table) val parsed2_table = parsePlan(sql2_table) val parsed1_purge = parsePlan(sql1_table + " PURGE") assertUnsupported(sql1_view) assertUnsupported(sql2_view) val expected1_table = AlterTableDropPartition( UnresolvedTable(Seq("table_name")), Seq( UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")), UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"))), ifExists = true, purge = false, retainData = false) val expected2_table = expected1_table.copy(ifExists = false) val expected1_purge = expected1_table.copy(purge = true) comparePlans(parsed1_table, expected1_table) comparePlans(parsed2_table, expected2_table) comparePlans(parsed1_purge, expected1_purge) val sql3_table = "ALTER TABLE a.b.c DROP IF EXISTS PARTITION (ds='2017-06-10')" val expected3_table = AlterTableDropPartition( UnresolvedTable(Seq("a", "b", "c")), Seq(UnresolvedPartitionSpec(Map("ds" -> "2017-06-10"))), ifExists = true, purge = false, retainData = false) val parsed3_table = parsePlan(sql3_table) comparePlans(parsed3_table, expected3_table) } test("show current namespace") { comparePlans( parsePlan("SHOW CURRENT NAMESPACE"), ShowCurrentNamespaceStatement()) } test("alter table: SerDe properties") { val sql1 = "ALTER TABLE table_name SET SERDE 'org.apache.class'" val parsed1 = parsePlan(sql1) val expected1 = AlterTableSerDePropertiesStatement( Seq("table_name"), Some("org.apache.class"), None, None) comparePlans(parsed1, expected1) val sql2 = """ |ALTER TABLE table_name SET SERDE 'org.apache.class' |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed2 = parsePlan(sql2) val expected2 = AlterTableSerDePropertiesStatement( Seq("table_name"), Some("org.apache.class"), Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), None) comparePlans(parsed2, expected2) val sql3 = """ |ALTER TABLE table_name |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed3 = parsePlan(sql3) val expected3 = AlterTableSerDePropertiesStatement( Seq("table_name"), None, Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), None) comparePlans(parsed3, expected3) val sql4 = """ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08', country='us') |SET SERDE 'org.apache.class' |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed4 = parsePlan(sql4) val expected4 = AlterTableSerDePropertiesStatement( Seq("table_name"), Some("org.apache.class"), Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us"))) comparePlans(parsed4, expected4) val sql5 = """ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08', country='us') |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed5 = parsePlan(sql5) val expected5 = AlterTableSerDePropertiesStatement( Seq("table_name"), None, Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us"))) comparePlans(parsed5, 
expected5) val sql6 = """ |ALTER TABLE a.b.c SET SERDE 'org.apache.class' |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed6 = parsePlan(sql6) val expected6 = AlterTableSerDePropertiesStatement( Seq("a", "b", "c"), Some("org.apache.class"), Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), None) comparePlans(parsed6, expected6) val sql7 = """ |ALTER TABLE a.b.c PARTITION (test=1, dt='2008-08-08', country='us') |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',') """.stripMargin val parsed7 = parsePlan(sql7) val expected7 = AlterTableSerDePropertiesStatement( Seq("a", "b", "c"), None, Some(Map("columns" -> "foo,bar", "field.delim" -> ",")), Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us"))) comparePlans(parsed7, expected7) } test("alter view: AS Query") { val parsed = parsePlan("ALTER VIEW a.b.c AS SELECT 1") val expected = AlterViewAsStatement( Seq("a", "b", "c"), "SELECT 1", parsePlan("SELECT 1")) comparePlans(parsed, expected) } test("create view -- basic") { val v1 = "CREATE VIEW view1 AS SELECT * FROM tab1" val parsed1 = parsePlan(v1) val expected1 = CreateViewStatement( Seq("view1"), Seq.empty[(String, Option[String])], None, Map.empty[String, String], Some("SELECT * FROM tab1"), parsePlan("SELECT * FROM tab1"), false, false, PersistedView) comparePlans(parsed1, expected1) val v2 = "CREATE TEMPORARY VIEW a.b.c AS SELECT * FROM tab1" val parsed2 = parsePlan(v2) val expected2 = CreateViewStatement( Seq("a", "b", "c"), Seq.empty[(String, Option[String])], None, Map.empty[String, String], Some("SELECT * FROM tab1"), parsePlan("SELECT * FROM tab1"), false, false, LocalTempView) comparePlans(parsed2, expected2) } test("create view - full") { val v1 = """ |CREATE OR REPLACE VIEW view1 |(col1, col3 COMMENT 'hello') |TBLPROPERTIES('prop1Key'="prop1Val") |COMMENT 'BLABLA' |AS SELECT * FROM tab1 """.stripMargin val parsed1 = parsePlan(v1) val expected1 = CreateViewStatement( Seq("view1"), Seq("col1" -> None, "col3" -> Some("hello")), Some("BLABLA"), Map("prop1Key" -> "prop1Val"), Some("SELECT * FROM tab1"), parsePlan("SELECT * FROM tab1"), false, true, PersistedView) comparePlans(parsed1, expected1) val v2 = """ |CREATE OR REPLACE GLOBAL TEMPORARY VIEW a.b.c |(col1, col3 COMMENT 'hello') |COMMENT 'BLABLA' |AS SELECT * FROM tab1 """.stripMargin val parsed2 = parsePlan(v2) val expected2 = CreateViewStatement( Seq("a", "b", "c"), Seq("col1" -> None, "col3" -> Some("hello")), Some("BLABLA"), Map(), Some("SELECT * FROM tab1"), parsePlan("SELECT * FROM tab1"), false, true, GlobalTempView) comparePlans(parsed2, expected2) } test("create view -- partitioned view") { val v1 = "CREATE VIEW view1 partitioned on (ds, hr) as select * from srcpart" intercept[ParseException] { parsePlan(v1) } } test("create view - duplicate clauses") { def createViewStatement(duplicateClause: String): String = { s""" |CREATE OR REPLACE VIEW view1 |(col1, col3 COMMENT 'hello') |$duplicateClause |$duplicateClause |AS SELECT * FROM tab1 """.stripMargin } val sql1 = createViewStatement("COMMENT 'BLABLA'") val sql2 = createViewStatement("TBLPROPERTIES('prop1Key'=\\"prop1Val\\")") intercept(sql1, "Found duplicate clauses: COMMENT") intercept(sql2, "Found duplicate clauses: TBLPROPERTIES") } test("SPARK-32374: create temporary view with properties not allowed") { assertUnsupported( sql = """ |CREATE OR REPLACE TEMPORARY VIEW a.b.c |(col1, col3 COMMENT 'hello') |TBLPROPERTIES('prop1Key'="prop1Val") |AS SELECT * FROM tab1 """.stripMargin, containsThesePhrases = 
Seq("TBLPROPERTIES can't coexist with CREATE TEMPORARY VIEW")) } test("SHOW TBLPROPERTIES table") { comparePlans( parsePlan("SHOW TBLPROPERTIES a.b.c"), ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), None)) comparePlans( parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"), ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), Some("propKey1"))) } test("DESCRIBE FUNCTION") { comparePlans( parsePlan("DESC FUNCTION a"), DescribeFunction(UnresolvedFunc(Seq("a")), false)) comparePlans( parsePlan("DESCRIBE FUNCTION a"), DescribeFunction(UnresolvedFunc(Seq("a")), false)) comparePlans( parsePlan("DESCRIBE FUNCTION a.b.c"), DescribeFunction(UnresolvedFunc(Seq("a", "b", "c")), false)) comparePlans( parsePlan("DESCRIBE FUNCTION EXTENDED a.b.c"), DescribeFunction(UnresolvedFunc(Seq("a", "b", "c")), true)) } test("SHOW FUNCTIONS") { comparePlans( parsePlan("SHOW FUNCTIONS"), ShowFunctions(None, true, true, None)) comparePlans( parsePlan("SHOW USER FUNCTIONS"), ShowFunctions(None, true, false, None)) comparePlans( parsePlan("SHOW user FUNCTIONS"), ShowFunctions(None, true, false, None)) comparePlans( parsePlan("SHOW SYSTEM FUNCTIONS"), ShowFunctions(None, false, true, None)) comparePlans( parsePlan("SHOW ALL FUNCTIONS"), ShowFunctions(None, true, true, None)) comparePlans( parsePlan("SHOW FUNCTIONS LIKE 'funct*'"), ShowFunctions(None, true, true, Some("funct*"))) comparePlans( parsePlan("SHOW FUNCTIONS LIKE a.b.c"), ShowFunctions(Some(UnresolvedFunc(Seq("a", "b", "c"))), true, true, None)) val sql = "SHOW other FUNCTIONS" intercept(sql, s"$sql not supported") } test("DROP FUNCTION") { comparePlans( parsePlan("DROP FUNCTION a"), DropFunction(UnresolvedFunc(Seq("a")), false, false)) comparePlans( parsePlan("DROP FUNCTION a.b.c"), DropFunction(UnresolvedFunc(Seq("a", "b", "c")), false, false)) comparePlans( parsePlan("DROP TEMPORARY FUNCTION a.b.c"), DropFunction(UnresolvedFunc(Seq("a", "b", "c")), false, true)) comparePlans( parsePlan("DROP FUNCTION IF EXISTS a.b.c"), DropFunction(UnresolvedFunc(Seq("a", "b", "c")), true, false)) comparePlans( parsePlan("DROP TEMPORARY FUNCTION IF EXISTS a.b.c"), DropFunction(UnresolvedFunc(Seq("a", "b", "c")), true, true)) } test("CREATE FUNCTION") { parseCompare("CREATE FUNCTION a as 'fun'", CreateFunctionStatement(Seq("a"), "fun", Seq(), false, false, false)) parseCompare("CREATE FUNCTION a.b.c as 'fun'", CreateFunctionStatement(Seq("a", "b", "c"), "fun", Seq(), false, false, false)) parseCompare("CREATE OR REPLACE FUNCTION a.b.c as 'fun'", CreateFunctionStatement(Seq("a", "b", "c"), "fun", Seq(), false, false, true)) parseCompare("CREATE TEMPORARY FUNCTION a.b.c as 'fun'", CreateFunctionStatement(Seq("a", "b", "c"), "fun", Seq(), true, false, false)) parseCompare("CREATE FUNCTION IF NOT EXISTS a.b.c as 'fun'", CreateFunctionStatement(Seq("a", "b", "c"), "fun", Seq(), false, true, false)) parseCompare("CREATE FUNCTION a as 'fun' USING JAR 'j'", CreateFunctionStatement(Seq("a"), "fun", Seq(FunctionResource(JarResource, "j")), false, false, false)) parseCompare("CREATE FUNCTION a as 'fun' USING ARCHIVE 'a'", CreateFunctionStatement(Seq("a"), "fun", Seq(FunctionResource(ArchiveResource, "a")), false, false, false)) parseCompare("CREATE FUNCTION a as 'fun' USING FILE 'f'", CreateFunctionStatement(Seq("a"), "fun", Seq(FunctionResource(FileResource, "f")), false, false, false)) parseCompare("CREATE FUNCTION a as 'fun' USING JAR 'j', ARCHIVE 'a', FILE 'f'", CreateFunctionStatement(Seq("a"), "fun", Seq(FunctionResource(JarResource, "j"), 
FunctionResource(ArchiveResource, "a"), FunctionResource(FileResource, "f")), false, false, false)) intercept("CREATE FUNCTION a as 'fun' USING OTHER 'o'", "Operation not allowed: CREATE FUNCTION with resource type 'other'") } test("REFRESH FUNCTION") { parseCompare("REFRESH FUNCTION c", RefreshFunction(UnresolvedFunc(Seq("c")))) parseCompare("REFRESH FUNCTION b.c", RefreshFunction(UnresolvedFunc(Seq("b", "c")))) parseCompare("REFRESH FUNCTION a.b.c", RefreshFunction(UnresolvedFunc(Seq("a", "b", "c")))) } private case class TableSpec( name: Seq[String], schema: Option[StructType], partitioning: Seq[Transform], bucketSpec: Option[BucketSpec], properties: Map[String, String], provider: Option[String], options: Map[String, String], location: Option[String], comment: Option[String]) private object TableSpec { def apply(plan: LogicalPlan): TableSpec = { plan match { case create: CreateTableStatement => TableSpec( create.tableName, Some(create.tableSchema), create.partitioning, create.bucketSpec, create.properties, create.provider, create.options, create.location, create.comment) case replace: ReplaceTableStatement => TableSpec( replace.tableName, Some(replace.tableSchema), replace.partitioning, replace.bucketSpec, replace.properties, replace.provider, replace.options, replace.location, replace.comment) case ctas: CreateTableAsSelectStatement => TableSpec( ctas.tableName, Some(ctas.asSelect).filter(_.resolved).map(_.schema), ctas.partitioning, ctas.bucketSpec, ctas.properties, ctas.provider, ctas.options, ctas.location, ctas.comment) case rtas: ReplaceTableAsSelectStatement => TableSpec( rtas.tableName, Some(rtas.asSelect).filter(_.resolved).map(_.schema), rtas.partitioning, rtas.bucketSpec, rtas.properties, rtas.provider, rtas.options, rtas.location, rtas.comment) case other => fail(s"Expected to parse Create, CTAS, Replace, or RTAS plan" + s" from query, got ${other.getClass.getName}.") } } } test("comment on") { comparePlans( parsePlan("COMMENT ON DATABASE a.b.c IS NULL"), CommentOnNamespace(UnresolvedNamespace(Seq("a", "b", "c")), "")) comparePlans( parsePlan("COMMENT ON DATABASE a.b.c IS 'NULL'"), CommentOnNamespace(UnresolvedNamespace(Seq("a", "b", "c")), "NULL")) comparePlans( parsePlan("COMMENT ON NAMESPACE a.b.c IS ''"), CommentOnNamespace(UnresolvedNamespace(Seq("a", "b", "c")), "")) comparePlans( parsePlan("COMMENT ON TABLE a.b.c IS 'xYz'"), CommentOnTable(UnresolvedTable(Seq("a", "b", "c")), "xYz")) } // TODO: ignored by SPARK-31707, restore the test after create table syntax unification ignore("create table - without using") { val sql = "CREATE TABLE 1m.2g(a INT)" val expectedTableSpec = TableSpec( Seq("1m", "2g"), Some(new StructType().add("a", IntegerType)), Seq.empty[Transform], None, Map.empty[String, String], None, Map.empty[String, String], None, None) testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false) } }
shuangshuangwang/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
Scala
apache-2.0
82,598
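The suite above exercises Catalyst's SQL parser by comparing parsed logical plans against hand-built expectations. The sketch below shows that parse-and-inspect round trip outside a test harness; it assumes only that the spark-catalyst artifact is on the classpath, and the object name is invented for the example.

// Standalone sketch of the round trip used throughout the suite above.
// CatalystSqlParser.parsePlan is the real entry point; the printed output
// is illustrative only -- the suite's comparePlans additionally normalizes
// plans before asserting equality, which this sketch skips.
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

object ParsePlanSketch {
  def main(args: Array[String]): Unit = {
    val plan: LogicalPlan = CatalystSqlParser.parsePlan(
      "DELETE FROM testcat.ns1.ns2.tbl AS t WHERE t.a = 2")
    // Unresolved relations/attributes are expected here: parsing happens
    // before analysis, which is exactly what the suite asserts against.
    println(plan.treeString)
  }
}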
package common

import akka.serialization._
import org.json4s.jackson.Serialization.{ read, write }
import org.json4s.{ DefaultFormats, Formats, jackson, Serialization }

/**
 * Base Akka serializer that writes messages as JSON via json4s/jackson.
 * Concrete subclasses supply the json4s Formats (and, per the Serializer
 * contract, a unique identifier).
 */
abstract class JsonSerializer extends Serializer {
  implicit val serialization = jackson.Serialization
  implicit def formats: Formats

  // No manifest needed: the JSON payload itself is self-describing enough here.
  def includeManifest: Boolean = false

  def toString(obj: AnyRef): String = {
    serialization.write(obj)
  }

  def fromString(json: String): AnyRef = {
    serialization.read[AnyRef](json)
  }

  def toBinary(obj: AnyRef): Array[Byte] = {
    toString(obj).getBytes
  }

  def fromBinary(
    bytes: Array[Byte],
    clazz: Option[Class[_]]): AnyRef = {
    fromString(new String(bytes))
  }
}
enpassant/rapids
modules/common/src/main/scala/common/JsonSerializer.scala
Scala
apache-2.0
709
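A hedged sketch of how the abstract JsonSerializer above might be specialized: a concrete subclass only needs to supply an identifier and the json4s Formats. The class name, identifier value, and config keys below are invented for illustration, not part of the original module.

// Hypothetical concrete serializer built on the abstract class above.
import org.json4s.{ DefaultFormats, Formats }

class EventJsonSerializer extends common.JsonSerializer {
  // Any application-unique id works; the value here is arbitrary.
  override def identifier: Int = 20170901
  override implicit def formats: Formats = DefaultFormats
}

// Registration would then happen in application.conf (illustrative keys):
//   akka.actor.serializers.event-json = "yourapp.EventJsonSerializer"
//   akka.actor.serialization-bindings."yourapp.Event" = event-json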
package org.scalajs.junit

import com.novocode.junit.{Ansi, RichLogger}
import Ansi._
import sbt.testing._
import org.scalajs.testinterface.TestUtils

import scala.util.{Try, Success, Failure}

final class JUnitTask(val taskDef: TaskDef, runner: JUnitBaseRunner)
    extends sbt.testing.Task {

  def tags: Array[String] = Array.empty

  def execute(eventHandler: EventHandler, loggers: Array[Logger],
      continuation: Array[Task] => Unit): Unit = {
    val richLogger = new RichLogger(loggers, runner.runSettings,
        taskDef.fullyQualifiedName)

    if (runner.runSettings.verbose)
      richLogger.info(c("Test run started", INFO))

    val startTime = System.currentTimeMillis

    val tasks = execute(eventHandler, loggers)

    if (runner.runSettings.verbose) {
      val time = System.currentTimeMillis - startTime
      val failed = runner.taskFailedCount()
      val ignored = runner.taskIgnoredCount()
      val total = runner.taskTotalCount()
      val msg = Seq(
          c("Test run finished:", INFO),
          c(s"$failed failed,", if (failed == 0) INFO else ERRCOUNT),
          c(s"$ignored ignored,", if (ignored == 0) INFO else IGNCOUNT),
          c(s"$total total,", INFO),
          c(s"${time.toDouble / 1000}s", INFO))
      richLogger.info(msg.mkString(" "))
    }

    continuation(tasks)
  }

  def execute(eventHandler: EventHandler, loggers: Array[Logger]): Array[Task] = {
    val richLogger = new RichLogger(loggers, runner.runSettings,
        taskDef.fullyQualifiedName)

    val hookName = taskDef.fullyQualifiedName + "$scalajs$junit$hook"

    Try(TestUtils.loadModule(hookName, runner.testClassLoader)) match {
      case Success(classMetadata: JUnitTestMetadata) =>
        new JUnitExecuteTest(taskDef.fullyQualifiedName, runner,
            classMetadata, richLogger).executeTests()

      case Success(_) =>
        richLogger.error("Error while loading test class: " +
            taskDef.fullyQualifiedName + ", expected " + hookName +
            " to extend JUnitTestMetadata")

      case Failure(exception) =>
        richLogger.error("Error while loading test class: " +
            taskDef.fullyQualifiedName, exception)
    }

    runner.taskDone()
    Array()
  }

  private class DummyEvent(taskDef: TaskDef, t: Option[Throwable]) extends Event {
    val fullyQualifiedName: String = taskDef.fullyQualifiedName
    val fingerprint: Fingerprint = taskDef.fingerprint
    val selector: Selector = new SuiteSelector

    val status: Status =
      if (t.isDefined) Status.Error
      else Status.Success

    val throwable: OptionalThrowable =
      t.fold(new OptionalThrowable)(new OptionalThrowable(_))

    val duration: Long = -1L
  }
}
nicolasstucki/scala-js-junit
runtime/src/main/scala/org/scalajs/junit/JUnitTask.scala
Scala
bsd-3-clause
2,694
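For context, a minimal sketch of driving an sbt test-interface Task tree such as the class above: the two-argument execute from the standard interface returns follow-up tasks, which are drained recursively (JUnitTask also offers the continuation-passing overload seen above). The handler and logger stubs are invented for illustration; a real runner would record events rather than discard them.

// Illustrative driver for sbt.testing.Task values.
import sbt.testing.{ Event, EventHandler, Logger, Task }

object TaskDriverSketch {
  val handler: EventHandler = new EventHandler {
    def handle(event: Event): Unit = () // a real runner records test status here
  }
  val logger: Logger = new Logger {
    def ansiCodesSupported(): Boolean = false
    def error(msg: String): Unit = println(s"[error] $msg")
    def warn(msg: String): Unit = println(s"[warn] $msg")
    def info(msg: String): Unit = println(s"[info] $msg")
    def debug(msg: String): Unit = ()
    def trace(t: Throwable): Unit = t.printStackTrace()
  }

  // Depth-first drain of a task and whatever follow-up tasks it spawns.
  def run(task: Task): Unit =
    task.execute(handler, Array(logger)).foreach(run)
}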
/**
 * Copyright (c) 2013 Saddle Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/
package org.saddle.index

import org.saddle._

/**
 * An IndexMaker takes some input of type I and returns an Index whose
 * elements are of type O.
 *
 * The basic use case is to take a Tuple,,N,, of Seq-like instances and
 * return an Index whose entries are instances of Tuple,,N,, corresponding
 * to the elements of the original Seqs.
 *
 * @tparam I Type of input with which to make index
 * @tparam O Type of contents of output index
 */
trait IndexMaker[I, O] {
  def apply(in: I): Index[O]
}

/**
 * Companion object which houses implicit instances of IndexMaker
 */
object IndexMaker extends IndexMakerLowPriority {
  // -------------------------
  // IndexMaker instances

  implicit def make2V[T[K] <: SeqLike[K], I1: ST: ORD, I2: ST: ORD] =
    new IndexMaker[(T[I1], T[I2]), (I1, I2)] {
      def apply(in: (T[I1], T[I2])) = zip2V(in._1, in._2)
    }

  implicit def make3V[T[K] <: SeqLike[K], I1: ST: ORD, I2: ST: ORD, I3: ST: ORD] =
    new IndexMaker[(T[I1], T[I2], T[I3]), (I1, I2, I3)] {
      def apply(in: (T[I1], T[I2], T[I3])) = zip3V(in._1, in._2, in._3)
    }

  implicit def make4V[T[K] <: SeqLike[K], I1: ST: ORD, I2: ST: ORD, I3: ST: ORD, I4: ST: ORD] =
    new IndexMaker[(T[I1], T[I2], T[I3], T[I4]), (I1, I2, I3, I4)] {
      def apply(in: (T[I1], T[I2], T[I3], T[I4])) = zip4V(in._1, in._2, in._3, in._4)
    }

  implicit def make5V[T[K] <: SeqLike[K],
    I1: ST: ORD, I2: ST: ORD, I3: ST: ORD, I4: ST: ORD, I5: ST: ORD] =
    new IndexMaker[(T[I1], T[I2], T[I3], T[I4], T[I5]), (I1, I2, I3, I4, I5)] {
      def apply(in: (T[I1], T[I2], T[I3], T[I4], T[I5])) =
        zip5V(in._1, in._2, in._3, in._4, in._5)
    }

  // -------------------------
  // Zip helpers

  private def zip2V[T[K] <: SeqLike[K], A: ST: ORD, B: ST: ORD](
    a: T[A], b: T[B]): Index[(A, B)] = {
    require(a.length == b.length, "Arguments must have same length")
    val sz = a.length
    val arr = Array.ofDim[(A, B)](sz)
    var i = 0
    while (i < sz) {
      arr(i) = (a(i), b(i))
      i += 1
    }
    Index(arr)
  }

  private def zip3V[T[K] <: SeqLike[K], A: ST: ORD, B: ST: ORD, C: ST: ORD](
    a: T[A], b: T[B], c: T[C]): Index[(A, B, C)] = {
    require(a.length == b.length && b.length == c.length,
            "Arguments must have same length")
    val sz = a.length
    val arr = Array.ofDim[(A, B, C)](sz)
    var i = 0
    while (i < sz) {
      arr(i) = (a(i), b(i), c(i))
      i += 1
    }
    Index(arr)
  }

  private def zip4V[T[K] <: SeqLike[K], A: ST: ORD, B: ST: ORD, C: ST: ORD, D: ST: ORD](
    a: T[A], b: T[B], c: T[C], d: T[D]): Index[(A, B, C, D)] = {
    require(a.length == b.length && b.length == c.length && c.length == d.length,
            "Arguments must have same length")
    val sz = a.length
    val arr = Array.ofDim[(A, B, C, D)](sz)
    var i = 0
    while (i < sz) {
      arr(i) = (a(i), b(i), c(i), d(i))
      i += 1
    }
    Index(arr)
  }

  private def zip5V[T[K] <: SeqLike[K], A: ST: ORD, B: ST: ORD, C: ST: ORD, D: ST: ORD, E: ST: ORD](
    a: T[A], b: T[B], c: T[C], d: T[D], e: T[E]): Index[(A, B, C, D, E)] = {
    require(a.length == b.length && b.length == c.length && c.length == d.length &&
            d.length == e.length,
            "Arguments must have same length")
    val sz = a.length
    val arr = Array.ofDim[(A, B, C, D, E)](sz)
    var i = 0
    while (i < sz) {
      arr(i) = (a(i), b(i), c(i), d(i), e(i))
      i += 1
    }
    Index(arr)
  }
}

trait IndexMakerLowPriority {
  type SeqLike[K] = { def length: Int; def apply(i: Int): K }

  implicit def make1V[T[K] <: SeqLike[K], A: ST: ORD] =
    new IndexMaker[T[A], A] {
      def apply(in: T[A]): Index[A] = {
        val sz = in.length
        val arr = Array.ofDim[A](sz)
        var i = 0
        while (i < sz) {
          arr(i) = in(i)
          i += 1
        }
        Index(arr)
      }
    }
}
jyt109/saddle
saddle-core/src/main/scala/org/saddle/index/IndexMaker.scala
Scala
apache-2.0
4,425
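A usage sketch for the IndexMaker type class above: summoning the pair instance to zip two columns into an Index of tuples. It assumes scala.Vector satisfies the structural SeqLike bound (it has length and apply), and the demo data and object name are invented.

// Usage sketch: zip two equal-length sequences into an Index of pairs via
// the implicit make2V instance. reflectiveCalls is needed because SeqLike
// is a structural type.
import org.saddle._
import org.saddle.index.IndexMaker
import scala.language.reflectiveCalls

object IndexMakerSketch {
  def main(args: Array[String]): Unit = {
    val years  = Vector(2019, 2019, 2020)
    val cities = Vector("nyc", "sfo", "nyc")
    val maker = implicitly[IndexMaker[(Vector[Int], Vector[String]), (Int, String)]]
    val ix: Index[(Int, String)] = maker((years, cities))
    println(ix) // Index of (2019,nyc), (2019,sfo), (2020,nyc)
  }
}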