code | repo_name | path | language | license | size
---|---|---|---|---|---
package controllers.v1
import javax.inject._
import models.ConversionStatus._
import play.api.libs.json._
import play.api.mvc._
import services.WorkerService
@Singleton
class WorkerController @Inject()(workerService: WorkerService, cc: ControllerComponents) extends AbstractController(cc) {
def status = Action {
Ok(Json.toJson(workerService.status))
}
def log(offset: Int) = Action {
Ok(Json.toJson(workerService.log(offset)))
}
// def cancel = {
// Ok(Json.toJson(worker.cancel))
// }
//
// def request = Action.async { request =>
// request.body.asJson.map { json =>
// json.validate[List[String]].map {
// case req: List[String] => {
// Future {
// worker.request(req)
// }.map {
// case Some(job) => Ok(Json.toJson(job))
// case None => NoContent
// }
// }
// }.recoverTotal {
// e => Future(BadRequest("Detected error:" + JsError.toJson(e)))
// }
// }.getOrElse {
// Future(BadRequest("Expecting Json data"))
// }
// }
//
// // FIXME this doesn't have to return anything, other than OK/NOK
// def submit = Action.async { request =>
// request.body.asJson.map { json =>
// json.validate[Job].map {
// case job: Job => {
// Future {
// worker.submit(job)
// }.map {
// case res => Ok(Json.toJson(res))
// }
// }
// }.recoverTotal {
// e => Future(BadRequest("Detected error:" + JsError.toJson(e)))
// }
// }.getOrElse {
// Future(BadRequest("Expecting Json data"))
// }
// }
}
| kuhnuri/kuhnuri-worker | common/app/controllers/v1/WorkerController.scala | Scala | apache-2.0 | 1,705 |
object ch7_1 {
trait Par[A]
def map2[A,B,C](a: Par[A], b: Par[B])(f: (A, B) => C): Par[C] = ???
}
import ch7_1._
/*
from repl you can test typing:
:load src/main/scala/fpinscala/ch7/Exercise1.scala
*/
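// A hedged illustration (not part of the exercise solution): the names below are made up,
// and map2 is still `???`, so actually invoking it throws scala.NotImplementedError.
// It only shows how the signature is meant to be used from the REPL.
//   scala> :load src/main/scala/fpinscala/ch7/Exercise1.scala
//   scala> import ch7_1._
//   scala> val p: Par[Int] = new Par[Int] {}
//   scala> map2(p, p)((a, b) => a + b)   // type-checks; fails at runtime until implemented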
| rucka/fpinscala | src/main/scala/fpinscala/ch7/Exercise1.scala | Scala | gpl-2.0 | 208 |
package no.netcompany.testdatagen.utils
// Copyright (C) 2014 Lars Reed -- GNU GPL 2.0 -- see LICENSE.txt
import scala.util.Random
/**
* A random function that answers true in n% of the cases.
*/
trait Percentage {
/** A random function that answers true in n% of the cases. */
def hit(n: Int): Boolean = Random.nextInt(100) < n
}
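// Illustrative usage sketch (this object is hypothetical, not part of the library):
// mixing in Percentage lets any class make probabilistic decisions, here answering
// true for roughly 30% of calls.
object PercentageUsageExample extends Percentage {
  def shouldSample(): Boolean = hit(30)
}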
| lre-mesan/testdata | src/main/scala/no/netcompany/testdatagen/utils/Percentage.scala | Scala | gpl-2.0 | 340 |
package filodb.coordinator
import com.typesafe.config.Config
import com.typesafe.scalalogging.StrictLogging
import kamon.{MetricReporter, SpanReporter}
import kamon.metric._
import kamon.metric.MeasurementUnit.{information, time}
import kamon.metric.MeasurementUnit.Dimension.{Information, Time}
import kamon.trace.Span
import kamon.util.Clock
class KamonMetricsLogReporter extends MetricReporter with StrictLogging {
override def start(): Unit = {
logger.info("Started KamonMetricsLogReporter successfully")
}
override def stop(): Unit = {}
override def reconfigure(config: Config): Unit = {}
override def reportPeriodSnapshot(snapshot: PeriodSnapshot): Unit = {
logMetricValues(snapshot.metrics.counters, "counter")
logMetricValues(snapshot.metrics.gauges, "gauge")
logMetricDistribution(snapshot.metrics.histograms, "histograms")
logMetricDistribution(snapshot.metrics.rangeSamplers, "rangeSamplers")
}
private def logMetricValues(metrics: Seq[MetricValue], metricType: String): Unit = {
for { c <- metrics } {
val name = normalizeMetricName(c.name, c.unit)
val value = scale(c.value, c.unit)
logger.info(s"KAMON ${metricType} name=${name} ${formatTags(c.tags)} value=$value")
}
}
private def logMetricDistribution(metrics: Seq[MetricDistribution], metricType: String): Unit = {
for { m <- metrics } {
val name = normalizeMetricName(m.name, m.unit)
val h = m.distribution
def percentile(percentile: Double) = scale(h.percentile(percentile).value, m.unit)
logger.info(s"KAMON ${metricType} name=$name ${formatTags(m.tags)} " +
s"n=${h.count} min=${scale(h.min, m.unit)} " +
s"p50=${percentile(50.0D)} p90=${percentile(90.0D)} " +
s"p95=${percentile(95.0D)} p99=${percentile(99.0D)} " +
s"p999=${percentile(99.9D)} max=${scale(h.max, m.unit)}")
}
}
private def formatTags(tags: Map[String, String]) = tags.view.map { case (k, v) => s"$k=$v" }.mkString(" ")
private def normalizeLabelName(label: String): String =
label.map(charOrUnderscore)
private def charOrUnderscore(char: Char): Char =
if (char.isLetterOrDigit || char == '_') char else '_'
private def normalizeMetricName(metricName: String, unit: MeasurementUnit): String = {
val normalizedMetricName = metricName.map(charOrUnderscore)
unit.dimension match {
case Time => normalizedMetricName + "_seconds"
case Information => normalizedMetricName + "_bytes"
case _ => normalizedMetricName
}
}
private def scale(value: Long, unit: MeasurementUnit): Double = unit.dimension match {
case Time if unit.magnitude != time.seconds.magnitude =>
MeasurementUnit.scale(value, unit, time.seconds)
case Information if unit.magnitude != information.bytes.magnitude =>
MeasurementUnit.scale(value, unit, information.bytes)
case _ => value
}
}
class KamonSpanLogReporter extends SpanReporter with StrictLogging {
override def reportSpans(spans: Seq[Span.FinishedSpan]): Unit = {
spans.groupBy(_.operationName).foreach { case (name, spans) =>
val durations = spans.map { s => Math.floorDiv(Clock.nanosBetween(s.from, s.to), 1000) }
logger.info(s"KAMON-TRACE name $name min=${durations.min} max=${durations.max} " +
s"avg=${durations.sum.toFloat / durations.size}")
}
}
override def start(): Unit = {
logger.info("Started KamonSpanLogReporter successfully")
}
override def stop(): Unit = {}
override def reconfigure(config: Config): Unit = {}
} | velvia/FiloDB | coordinator/src/main/scala/filodb.coordinator/KamonLogger.scala | Scala | apache-2.0 | 3,570 |
package com.malpeza.solid.isp
class TransactionResponse(val done: Boolean, val reason: Option[FailReason]) {
def this() {
this(false, Option(CallBank))
}
}
object TransactionResponse {
def apply() = new TransactionResponse()
def apply(done: Boolean) = new TransactionResponse(done, None)
def apply(failReason: FailReason) = new TransactionResponse(false, Option(failReason))
def apply(done: Boolean, reason: Option[FailReason]) = new TransactionResponse(done, reason)
}
sealed trait FailReason
case object CallBank extends FailReason
case object InsufficientBalance extends FailReason | lsolano/blog.solid.demo | scala/src/main/scala/com/malpeza/solid/isp/TransactionResponse.scala | Scala | mit | 604 |
package net.xylophones.planetoid.game.logic
import net.xylophones.planetoid.game.maths.{RotationDegreesToDirectionVector, Vector3D}
import net.xylophones.planetoid.game.model.{PlayerInput, Rocket, Planet, GamePhysics}
import net.xylophones.planetoid.game.logic.ModelTestObjectMother._
import org.junit.runner.RunWith
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import scala.math.Pi
@RunWith(classOf[JUnitRunner])
class RocketPositionCalculatorTest extends FunSuite with Matchers {
val underTest: RocketPositionCalculator = new RocketPositionCalculator(new BoundsChecker, new RotationDegreesToDirectionVector)
val planet: Planet = Planet(Vector3D(100, 100, 100), 999)
test("planet's gravity attracts rocket") {
// given
val physics: GamePhysics = new GamePhysics(gForce=0.2)
val input = PlayerInput()
val rocket = createRocketAtOriginPointingUp()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val movingTowardsPlanet = updatedRocket.velocity.normalise ~= Vector3D(1, 1, 1).normalise
movingTowardsPlanet shouldBe true
}
test("rocket rotates when user input indicates left") {
val physics: GamePhysics = new GamePhysics(rocketRotationSpeed = 10, gForce = 1, rocketThrustForce = 10)
val input = PlayerInput(left = true)
val rocket = createRocketAtOriginPointingForward()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val tenDegCounterClockwise = updatedRocket.rotation ~= Vector3D(10, 0, 0)
tenDegCounterClockwise shouldBe true
}
test("rocket rotates when user input indicates right") {
val physics: GamePhysics = new GamePhysics(rocketRotationSpeed = 10, gForce = 1, rocketThrustForce = 10)
val input = PlayerInput(right = true)
val rocket = createRocketAtOriginPointingUp()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val tenDegClockwise = updatedRocket.rotation ~= Vector3D(80, 0, 0)
tenDegClockwise shouldBe true
}
test("rocket rotates when user input indicates up") {
val physics: GamePhysics = new GamePhysics(rocketRotationSpeed = 10, gForce = 1, rocketThrustForce = 10)
val input = PlayerInput(up = true)
val rocket = createRocketAtOriginPointingForward()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val tenDegUp = updatedRocket.rotation ~= Vector3D(0, 10, 0)
tenDegUp shouldBe true
}
test("rocket rotates when user input indicates down") {
val physics: GamePhysics = new GamePhysics(rocketRotationSpeed = 10, gForce = 1, rocketThrustForce = 10)
val input = PlayerInput(down = true)
val rocket = createRocketAtOriginPointingForward()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val tenDegDown = updatedRocket.rotation ~= Vector3D(0, -10, 0)
tenDegDown shouldBe true
}
test("rocket moves forward when user input indicates thrust") {
val physics: GamePhysics = new GamePhysics(gForce = 0, rocketThrustForce = 10, rocketMass = 1)
val input = PlayerInput(thrust = true)
val rocket = createRocketAtOriginPointingForward()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val positionIsForward10 = updatedRocket.position ~= Vector3D(0, 0, -10)
positionIsForward10 shouldBe true
}
test("rocket moves backwards when user input indicates reverse thrust") {
val physics: GamePhysics = new GamePhysics(gForce = 0, rocketReverseThrustForce = -5, rocketMass = 1)
val input = PlayerInput(reverseThrust = true)
val rocket = createRocketAtOriginPointingForward()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val positionIsBack5 = updatedRocket.position ~= Vector3D(0, 0, 5)
positionIsBack5 shouldBe true
}
test("rocket moves backwards when user input indicates reverse thrust when facing up") {
val physics: GamePhysics = new GamePhysics(gForce = 0, rocketReverseThrustForce = -5, rocketMass = 1)
val input = PlayerInput(reverseThrust = true)
val rocket = createRocketAtOriginPointingUp()
// when
val updatedRocket: Rocket = underTest.updateRocketPosition(rocket, input, planet, physics)
// then
val positionIsUp5 = updatedRocket.position ~= Vector3D(0, -5, 0)
positionIsUp5 shouldBe true
}
} | wjsrobertson/planetoid3d | game/src/test/scala/net/xylophones/planetoid/game/logic/RocketPositionCalculatorTest.scala | Scala | apache-2.0 | 4,652 |
package sk.scalagine.math
import org.scalatest.matchers.{MatchResult, Matcher}
import org.scalautils.TripleEqualsSupport.Spread
import org.scalautils.Prettifier
import sk.scalagine.math
/**
* Created with IntelliJ IDEA.
* User: zladovan
* Date: 11.5.2014
* Time: 21:55
*/
trait PlusMinusMatchers {
trait ContainsDataList[U] {
def dataList(): List[Float]
}
case class DataListSpread[T <: ContainsDataList[_]](containsDataList: T, tolerance: Float) {
val spreads = containsDataList.dataList().map(cell => Spread(cell, tolerance))
def isWithin(anotherContainsDataList: T): Boolean = {
spreads
.zip(anotherContainsDataList.dataList())
.forall(spreadAndCell => spreadAndCell._1.isWithin(spreadAndCell._2))
}
def ===(n: T): Boolean = isWithin(n)
def !==(n: T): Boolean = !isWithin(n)
override def toString: String = Prettifier.default(containsDataList) + " +- " + Prettifier.default(tolerance)
}
def equal[T <% ContainsDataList[T]](dataListSpread: DataListSpread[ContainsDataList[T]]): Matcher[T] = {
new Matcher[T] {
def apply(left: T): MatchResult = {
MatchResult(
dataListSpread.isWithin(left),
"{0} did not equal {1} plus or minus {2} on each component",
"{0} equaled {1} plus or minus {2} on each component",
Vector(left, dataListSpread.containsDataList, dataListSpread.tolerance)
)
}
override def toString(): String = "equal (" + Prettifier.default(dataListSpread) + ")"
}
}
final class ContainsDataListPlusOrMinusWrapper[T <: ContainsDataList[_]](pivot: T) {
def +-(tolerance: Float): DataListSpread[T] = {
if (tolerance < 0)
throw new IllegalArgumentException(tolerance.toString + " passed to +- was zero or negative. " +
"Must be a positive non-zero number.")
DataListSpread(pivot, tolerance)
}
}
implicit def convertVectorToContainsDataList[T <: math.Vector[_]](vector: T): ContainsDataList[T]
= new ContainsDataList[T] {
override def dataList(): List[Float] = vector.data
override def toString: String = vector.toString
}
implicit def convertVectorToPlusMinusWrapper[T <: math.Vector[_]](vector: T): ContainsDataListPlusOrMinusWrapper[ContainsDataList[T]]
= new ContainsDataListPlusOrMinusWrapper[ContainsDataList[T]](convertVectorToContainsDataList(vector))
implicit def convertMatrixToContainsDataList[T <: Matrix[_,_]](matrix: T): ContainsDataList[T]
= new ContainsDataList[T] {
override def dataList(): List[Float] = matrix.data.flatMap(row => row)
override def toString: String = matrix.toString
}
implicit def convertMatrixToPlusMinusWrapper[T <: Matrix[_,_]](matrix: T): ContainsDataListPlusOrMinusWrapper[ContainsDataList[T]]
= new ContainsDataListPlusOrMinusWrapper[ContainsDataList[T]](convertMatrixToContainsDataList(matrix))
implicit def convertQuaternionToContainsDataList(quaternion: Quaternion): ContainsDataList[Quaternion]
= new ContainsDataList[Quaternion] {
override def dataList(): List[Float] = quaternion.s :: quaternion.ijk.data
override def toString: String = quaternion.toString
}
implicit def convertQuaternionToPlusMinusWrapper(quaternion: Quaternion): ContainsDataListPlusOrMinusWrapper[ContainsDataList[Quaternion]]
= new ContainsDataListPlusOrMinusWrapper[ContainsDataList[Quaternion]](convertQuaternionToContainsDataList(quaternion))
}
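// Hedged usage sketch (names and tolerances are illustrative): inside a test that mixes in
// PlusMinusMatchers together with ScalaTest's Matchers, the +- syntax compares every
// component of a vector, matrix or quaternion within the given tolerance:
//   actualVector should equal (Vector3D(1f, 2f, 3f) +- 0.001f)
//   actualQuaternion should equal (expectedQuaternion +- 0.0001f)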
| zladovan/scalagine | engine/math/src/test/scala/sk/scalagine/math/PlusMinusMatchers.scala | Scala | mit | 3,460 |
package mesosphere.marathon
package api.validation
import com.wix.accord.validate
import mesosphere.UnitTest
import mesosphere.marathon.api.v2.AppNormalization
import mesosphere.marathon.api.v2.validation.AppValidation
import mesosphere.marathon.core.plugin.PluginManager
import mesosphere.marathon.raml.{ App, AppCContainer, AppUpdate, ContainerPortMapping, EngineType, Raml }
import mesosphere.marathon.state.AppDefinition
import org.scalatest.Matchers
import play.api.libs.json.Json
class AppUpdateValidatorTest extends UnitTest with Matchers {
implicit val appUpdateValidator = AppValidation.validateCanonicalAppUpdateAPI(Set.empty, () => AppNormalization.Configuration(None, "mesos-bridge-name").defaultNetworkName)
implicit val validAppDefinition = AppDefinition.validAppDefinition(Set.empty)(PluginManager.None)
"validation that considers container types" should {
"test that Docker container is validated" in {
val f = new Fixture
val update = AppUpdate(
id = Some("/test"),
container = Some(f.invalidDockerContainer))
assert(validate(update).isFailure)
}
"test that AppC container is validated" in {
val f = new Fixture
val update = AppUpdate(
id = Some("/test"),
container = Some(f.invalidAppCContainer))
assert(validate(update).isFailure)
}
}
"validation for network type changes" should {
// regression test for DCOS-10641
"allow updating from HOST to USER network for an app using a Docker container" in {
val originalApp = Json.parse(
"""
| {
| "id": "/sleepy-moby",
| "cmd": "sleep 1000",
| "instances": 1,
| "cpus": 1,
| "mem": 128,
| "disk": 0,
| "gpus": 0,
| "backoffSeconds": 1,
| "backoffFactor": 1.15,
| "maxLaunchDelaySeconds": 3600,
| "container": {
| "docker": {
| "image": "alpine",
| "forcePullImage": false,
| "privileged": false,
| "network": "HOST"
| }
| },
| "upgradeStrategy": {
| "minimumHealthCapacity": 0.5,
| "maximumOverCapacity": 0
| },
| "portDefinitions": [
| {
| "protocol": "tcp",
| "port": 10004
| }
| ],
| "requirePorts": false
|}
""".stripMargin).as[App]
val config = AppNormalization.Configuration(None, "mesos-bridge-name")
val appDef = Raml.fromRaml(
AppNormalization.apply(config)
.normalized(AppNormalization.forDeprecated(config).normalized(originalApp)))
val appUpdate = AppNormalization.forUpdates(config).normalized(
AppNormalization.forDeprecatedUpdates(config).normalized(Json.parse(
"""
|{
| "id": "/sleepy-moby",
| "cmd": "sleep 1000",
| "instances": 1,
| "cpus": 1,
| "mem": 128,
| "disk": 0,
| "gpus": 0,
| "backoffSeconds": 1,
| "backoffFactor": 1.15,
| "maxLaunchDelaySeconds": 3600,
| "container": {
| "docker": {
| "image": "alpine",
| "forcePullImage": false,
| "privileged": false,
| "network": "USER"
| }
| },
| "upgradeStrategy": {
| "minimumHealthCapacity": 0.5,
| "maximumOverCapacity": 0
| },
| "portDefinitions": [],
| "ipAddress": {
| "networkName": "dcos"
| },
| "requirePorts": false
|}
""".stripMargin).as[AppUpdate]))
assert(validate(Raml.fromRaml(Raml.fromRaml(appUpdate -> appDef))).isSuccess)
}
}
class Fixture {
def invalidDockerContainer: raml.Container = raml.Container(
EngineType.Docker,
portMappings = Option(Seq(
ContainerPortMapping(
// Invalid (negative) port numbers
containerPort = -1, hostPort = Some(-1), servicePort = -1)
))
)
def invalidAppCContainer: raml.Container = raml.Container(EngineType.Mesos, appc = Some(AppCContainer(
image = "anImage",
id = Some("invalidID")))
)
}
}
| guenter/marathon | src/test/scala/mesosphere/marathon/api/validation/AppUpdateValidatorTest.scala | Scala | apache-2.0 | 4,378 |
package org.randi3.model.criterion
import org.randi3.model.criterion.constraint.OrdinalConstraint
import org.randi3.model.Entity._
import scalaz._
import Scalaz._
case class OrdinalCriterion private(id: Int, version: Int, name: String, description: String, values: Set[String], inclusionConstraint: Option[OrdinalConstraint], strata: List[OrdinalConstraint], private val dummy: Any) extends Criterion[String, OrdinalConstraint] {
}
object OrdinalCriterion {
def apply(id: Int = Int.MinValue, version: Int = 0, name: String, description: String, values: Set[String], inclusionConstraint: Option[OrdinalConstraint], strata: List[OrdinalConstraint]): ValidationNel[String, OrdinalCriterion] = {
checkAll(
checkID(id),
checkVersion(version),
checkStringBetween(name, 2, maxTextLength),
checkStringBetween(description, 2, maxTextLength),
checkListContainsMin(values.toList, 2),
checkNotNull(inclusionConstraint),
checkNotNull(strata)
).toEither match {
case Left(x) => Failure(x)
case Right(_) => Success(new OrdinalCriterion(id, version, name, description, values, inclusionConstraint, strata, null))
}
}
private def validCriterion = new OrdinalCriterion(Int.MinValue, 0, "validName", "validDescription", Set("a", "b"), None, Nil, null)
def check(id: Int = validCriterion.id, version: Int = validCriterion.version, name: String = validCriterion.name, description: String = validCriterion.description, values: Set[String] = validCriterion.values, inclusionConstraint: Option[OrdinalConstraint] = validCriterion.inclusionConstraint, strata: List[OrdinalConstraint] = validCriterion.strata): ValidationNel[String, Boolean] = {
apply(id, version, name, description, values, inclusionConstraint, strata).toEither match {
case Left(x) => Failure(x)
case Right(_) => Success(true)
}
}
}
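// Hedged usage sketch (field values are illustrative): apply returns a ValidationNel,
// so callers fold over the accumulated error messages or the constructed criterion.
//   OrdinalCriterion(name = "severity", description = "Tumour severity grade",
//     values = Set("low", "medium", "high"), inclusionConstraint = None, strata = Nil)
//     .fold(errors => ..., criterion => ...)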
| dschrimpf/randi3-core | src/main/scala/org/randi3/model/criterion/OrdinalCriterion.scala | Scala | gpl-3.0 | 1,880 |
/**
* Created on February 17, 2011
* Copyright (c) 2011, Wei-ju Wu
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Wei-ju Wu nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY WEI-JU WU ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL WEI-JU WU BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dmpp.adf.logical
/**
* Constants for HasComment trait.
*/
object HasComment {
val CommentMaxChars = 79
}
/**
* Trait for blocks that has comments.
*/
trait HasComment extends ReadsBcplStrings {
self : HeaderBlock =>
import HasComment._
/**
* Returns the comment field stored in this block.
* @return this block's comment
*/
def comment: String = bcplStringAt(sector.sizeInBytes - 184, CommentMaxChars)
/**
* Sets the comment field for this block. If the length of newComment exceeds
* CommentMaxChars, an IllegalArgumentException is thrown.
* @param newComment the new comment
*/
def comment_=(newComment: String) {
if (newComment.length > CommentMaxChars) {
throw new IllegalArgumentException("max. 79 characters for comment")
}
setBcplStringAt(sector.sizeInBytes - 184, CommentMaxChars, newComment)
}
}
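// Hedged usage sketch (the concrete HeaderBlock instance comes from the surrounding
// library, so this is illustrative only):
//   val block: HeaderBlock with HasComment = ...
//   block.comment = "backup of original disk"   // at most 79 characters, otherwise IllegalArgumentException
//   println(block.comment)                      // reads the BCPL string back from the sector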
| weiju/adf-tools | adf-core/src/main/scala/org/dmpp/adf/logical/HasComment.scala | Scala | bsd-3-clause | 2,426 |
class C {
def +=(n: Int) {}
}
var v = new C
v /* line: 2 */ += 1
| ilinum/intellij-scala | testdata/resolve2/function/assignment/NotAssignmentOrdinary.scala | Scala | apache-2.0 | 67 |
package mesosphere.marathon.integration
import mesosphere.marathon.api.v2.json.GroupUpdate
import mesosphere.marathon.integration.setup.{ IntegrationFunSuite, IntegrationHealthCheck, SingleMarathonIntegrationTest, WaitTestSupport }
import mesosphere.marathon.state.{ AppDefinition, PathId, UpgradeStrategy }
import org.apache.http.HttpStatus
import org.scalatest._
import spray.http.DateTime
import scala.concurrent.duration._
class GroupDeployIntegrationTest
extends IntegrationFunSuite
with SingleMarathonIntegrationTest
with Matchers
with BeforeAndAfter
with GivenWhenThen {
//clean up state before running the test case
before(cleanUp())
test("create empty group successfully") {
Given("A group which does not exist in marathon")
val group = GroupUpdate.empty("test".toRootTestPath)
When("The group gets created")
val result = marathon.createGroup(group)
Then("The group is created. A success event for this group is send.")
result.code should be(201) //created
val event = waitForChange(result)
}
test("update empty group successfully") {
Given("An existing group")
val name = "test2".toRootTestPath
val group = GroupUpdate.empty(name)
val dependencies = Set("/test".toTestPath)
waitForChange(marathon.createGroup(group))
When("The group gets updated")
waitForChange(marathon.updateGroup(name, group.copy(dependencies = Some(dependencies))))
Then("The group is updated")
val result = marathon.group("test2".toRootTestPath)
result.code should be(200)
result.value.dependencies should be(dependencies)
}
test("deleting an existing group gives a 200 http response") {
Given("An existing group")
val group = GroupUpdate.empty("test3".toRootTestPath)
waitForChange(marathon.createGroup(group))
When("The group gets deleted")
val result = marathon.deleteGroup(group.id.get)
waitForChange(result)
Then("The group is deleted")
result.code should be(200)
// only expect the test base group itself
marathon.listGroupsInBaseGroup.value.filter { group => group.id != testBasePath } should be('empty)
}
test("delete a non existing group should give a 404 http response") {
When("A non existing group is deleted")
val result = marathon.deleteGroup("does_not_exist".toRootTestPath)
Then("We get a 404 http response code")
result.code should be(404)
}
test("create a group with applications to start") {
Given("A group with one application")
val app = appProxy("/test/app".toRootTestPath, "v1", 2, withHealth = false)
val group = GroupUpdate("/test".toRootTestPath, Set(app))
When("The group is created")
waitForChange(marathon.createGroup(group))
Then("A success event is send and the application has been started")
val tasks = waitForTasks(app.id, app.instances)
tasks should have size 2
}
test("update a group with applications to restart") {
Given("A group with one application started")
val id = "test".toRootTestPath
val appId = id / "app"
val app1V1 = appProxy(appId, "v1", 2, withHealth = false)
waitForChange(marathon.createGroup(GroupUpdate(id, Set(app1V1))))
waitForTasks(app1V1.id, app1V1.instances)
When("The group is updated, with a changed application")
val app1V2 = appProxy(appId, "v2", 2, withHealth = false)
waitForChange(marathon.updateGroup(id, GroupUpdate(id, Set(app1V2))))
Then("A success event is send and the application has been started")
waitForTasks(app1V2.id, app1V2.instances)
}
test("update a group with the same application so no restart is triggered") {
Given("A group with one application started")
val id = "test".toRootTestPath
val appId = id / "app"
val app1V1 = appProxy(appId, "v1", 2, withHealth = false)
waitForChange(marathon.createGroup(GroupUpdate(id, Set(app1V1))))
waitForTasks(app1V1.id, app1V1.instances)
val tasks = marathon.tasks(appId)
When("The group is updated, with the same application")
waitForChange(marathon.updateGroup(id, GroupUpdate(id, Set(app1V1))))
Then("There is no deployment and all tasks still live")
marathon.listDeploymentsForBaseGroup().value should be ('empty)
marathon.tasks(appId).value.toSet should be(tasks.value.toSet)
}
test("create a group with application with health checks") {
Given("A group with one application")
val id = "proxy".toRootTestPath
val appId = id / "app"
val proxy = appProxy(appId, "v1", 1)
val group = GroupUpdate(id, Set(proxy))
When("The group is created")
val create = marathon.createGroup(group)
Then("A success event is send and the application has been started")
waitForChange(create)
}
test("upgrade a group with application with health checks") {
Given("A group with one application")
val id = "test".toRootTestPath
val appId = id / "app"
val proxy = appProxy(appId, "v1", 1)
val group = GroupUpdate(id, Set(proxy))
waitForChange(marathon.createGroup(group))
val check = appProxyCheck(proxy.id, "v1", state = true)
When("The group is updated")
check.afterDelay(1.second, state = false)
check.afterDelay(3.seconds, state = true)
val update = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 1)))))
Then("A success event is send and the application has been started")
waitForChange(update)
}
test("rollback from an upgrade of group") {
Given("A group with one application")
val gid = "proxy".toRootTestPath
val appId = gid / "app"
val proxy = appProxy(appId, "v1", 2)
val group = GroupUpdate(gid, Set(proxy))
val create = marathon.createGroup(group)
waitForChange(create)
waitForTasks(proxy.id, proxy.instances)
val v1Checks = appProxyCheck(appId, "v1", state = true)
When("The group is updated")
waitForChange(marathon.updateGroup(gid, group.copy(apps = Some(Set(appProxy(appId, "v2", 2))))))
Then("The new version is deployed")
val v2Checks = appProxyCheck(appId, "v2", state = true)
WaitTestSupport.validFor("all v2 apps are available", 10.seconds) { v2Checks.pingSince(2.seconds) }
When("A rollback to the first version is initiated")
waitForChange(marathon.rollbackGroup(gid, create.value.version), 120.seconds)
Then("The rollback will be performed and the old version is available")
v1Checks.healthy
WaitTestSupport.validFor("all v1 apps are available", 10.seconds) { v1Checks.pingSince(2.seconds) }
}
test("during Deployment the defined minimum health capacity is never undershot") {
Given("A group with one application")
val id = "test".toRootTestPath
val appId = id / "app"
val proxy = appProxy(appId, "v1", 2).copy(upgradeStrategy = UpgradeStrategy(1))
val group = GroupUpdate(id, Set(proxy))
val create = marathon.createGroup(group)
waitForChange(create)
waitForTasks(appId, proxy.instances)
val v1Check = appProxyCheck(appId, "v1", state = true)
When("The new application is not healthy")
val v2Check = appProxyCheck(appId, "v2", state = false) //will always fail
val update = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
Then("All v1 applications are kept alive")
v1Check.healthy
WaitTestSupport.validFor("all v1 apps are always available", 15.seconds) { v1Check.pingSince(3.seconds) }
When("The new application becomes healthy")
v2Check.state = true //make v2 healthy, so the app can be cleaned
waitForChange(update)
}
test("An upgrade in progress can not be interrupted without force") {
Given("A group with one application with an upgrade in progress")
val id = "forcetest".toRootTestPath
val appId = id / "app"
val proxy = appProxy(appId, "v1", 2)
val group = GroupUpdate(id, Set(proxy))
val create = marathon.createGroup(group)
waitForChange(create)
appProxyCheck(appId, "v2", state = false) //will always fail
marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
When("Another upgrade is triggered, while the old one is not completed")
val result = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v3", 2)))))
Then("An error is indicated")
result.code should be (HttpStatus.SC_CONFLICT)
waitForEvent("group_change_failed")
When("Another upgrade is triggered with force, while the old one is not completed")
val force = marathon.updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v4", 2)))), force = true)
Then("The update is performed")
waitForChange(force)
}
test("A group with a running deployment can not be deleted without force") {
Given("A group with one application with an upgrade in progress")
val id = "forcetest".toRootTestPath
val appId = id / "app"
val proxy = appProxy(appId, "v1", 2)
appProxyCheck(appId, "v1", state = false) //will always fail
val group = GroupUpdate(id, Set(proxy))
val create = marathon.createGroup(group)
When("Delete the group, while the deployment is in progress")
val deleteResult = marathon.deleteGroup(id)
Then("An error is indicated")
deleteResult.code should be (HttpStatus.SC_CONFLICT)
waitForEvent("group_change_failed")
When("Delete is triggered with force, while the deployment is not completed")
val force = marathon.deleteGroup(id, force = true)
Then("The delete is performed")
waitForChange(force)
}
test("Groups with Applications with circular dependencies can not get deployed") {
Given("A group with 3 circular dependent applications")
val db = appProxy("/test/db".toTestPath, "v1", 1, dependencies = Set("/test/frontend1".toTestPath))
val service = appProxy("/test/service".toTestPath, "v1", 1, dependencies = Set(db.id))
val frontend = appProxy("/test/frontend1".toTestPath, "v1", 1, dependencies = Set(service.id))
val group = GroupUpdate("test".toTestPath, Set(db, service, frontend))
When("The group gets posted")
val result = marathon.createGroup(group)
Then("An unsuccessful response has been posted, with an error indicating cyclic dependencies")
val errors = (result.entityJson \ "details" \\ "errors").flatMap(_.as[Seq[String]])
errors.find(_.contains("cyclic dependencies")) shouldBe defined
}
test("Applications with dependencies get deployed in the correct order") {
Given("A group with 3 dependent applications")
val db = appProxy("/test/db".toTestPath, "v1", 1)
val service = appProxy("/test/service".toTestPath, "v1", 1, dependencies = Set(db.id))
val frontend = appProxy("/test/frontend1".toTestPath, "v1", 1, dependencies = Set(service.id))
val group = GroupUpdate("/test".toTestPath, Set(db, service, frontend))
When("The group gets deployed")
var ping = Map.empty[PathId, DateTime]
def storeFirst(health: IntegrationHealthCheck) {
if (!ping.contains(health.appId)) ping += health.appId -> DateTime.now
}
val dbHealth = appProxyCheck(db.id, "v1", state = true).withHealthAction(storeFirst)
val serviceHealth = appProxyCheck(service.id, "v1", state = true).withHealthAction(storeFirst)
val frontendHealth = appProxyCheck(frontend.id, "v1", state = true).withHealthAction(storeFirst)
waitForChange(marathon.createGroup(group))
Then("The correct order is maintained")
ping should have size 3
ping(db.id) should be < ping(service.id)
ping(service.id) should be < ping(frontend.id)
}
test("Groups with dependencies get deployed in the correct order") {
Given("A group with 3 dependent applications")
val db = appProxy("/test/db/db1".toTestPath, "v1", 1)
val service = appProxy("/test/service/service1".toTestPath, "v1", 1)
val frontend = appProxy("/test/frontend/frontend1".toTestPath, "v1", 1)
val group = GroupUpdate(
"/test".toTestPath,
Set.empty[AppDefinition],
Set(
GroupUpdate(PathId("db"), apps = Set(db)),
GroupUpdate(PathId("service"), apps = Set(service)).copy(dependencies = Some(Set("/test/db".toTestPath))),
GroupUpdate(PathId("frontend"), apps = Set(frontend)).copy(dependencies = Some(Set("/test/service".toTestPath)))
)
)
When("The group gets deployed")
var ping = Map.empty[PathId, DateTime]
def storeFirst(health: IntegrationHealthCheck) {
if (!ping.contains(health.appId)) ping += health.appId -> DateTime.now
}
val dbHealth = appProxyCheck(db.id, "v1", state = true).withHealthAction(storeFirst)
val serviceHealth = appProxyCheck(service.id, "v1", state = true).withHealthAction(storeFirst)
val frontendHealth = appProxyCheck(frontend.id, "v1", state = true).withHealthAction(storeFirst)
waitForChange(marathon.createGroup(group))
Then("The correct order is maintained")
ping should have size 3
ping(db.id) should be < ping(service.id)
ping(service.id) should be < ping(frontend.id)
}
ignore("Groups with dependant Applications get upgraded in the correct order with maintained upgrade strategy") {
var ping = Map.empty[String, DateTime]
def key(health: IntegrationHealthCheck) = s"${health.appId}_${health.versionId}"
def storeFirst(health: IntegrationHealthCheck) {
if (!ping.contains(key(health))) ping += key(health) -> DateTime.now
}
def create(version: String, initialState: Boolean) = {
val db = appProxy("/test/db".toTestPath, version, 1)
val service = appProxy("/test/service".toTestPath, version, 1, dependencies = Set(db.id))
val frontend = appProxy("/test/frontend1".toTestPath, version, 1, dependencies = Set(service.id))
(
GroupUpdate("/test".toTestPath, Set(db, service, frontend)),
appProxyCheck(db.id, version, state = initialState).withHealthAction(storeFirst),
appProxyCheck(service.id, version, state = initialState).withHealthAction(storeFirst),
appProxyCheck(frontend.id, version, state = initialState).withHealthAction(storeFirst))
}
Given("A group with 3 dependent applications")
val (groupV1, dbV1, serviceV1, frontendV1) = create("v1", true)
waitForChange(marathon.createGroup(groupV1))
When("The group gets updated, where frontend2 is not healthy")
val (groupV2, dbV2, serviceV2, frontendV2) = create("v2", false)
val upgrade = marathon.updateGroup(groupV2.id.get, groupV2)
waitForHealthCheck(dbV2)
Then("The correct order is maintained")
ping should have size 4
ping(key(dbV1)) should be < ping(key(serviceV1))
ping(key(serviceV1)) should be < ping(key(frontendV1))
WaitTestSupport.validFor("all v1 apps are available as well as db v2", 15.seconds) {
dbV1.pingSince(2.seconds) &&
serviceV1.pingSince(2.seconds) &&
frontendV1.pingSince(2.seconds) &&
dbV2.pingSince(2.seconds)
}
When("The v2 db becomes healthy")
dbV2.state = true
waitForHealthCheck(serviceV2)
Then("The correct order is maintained")
ping should have size 5
ping(key(serviceV1)) should be < ping(key(frontendV1))
ping(key(dbV2)) should be < ping(key(serviceV2))
WaitTestSupport.validFor("service and frontend v1 are available as well as db and service v2", 15.seconds) {
serviceV1.pingSince(2.seconds) &&
frontendV1.pingSince(2.seconds) &&
dbV2.pingSince(2.seconds) &&
serviceV2.pingSince(2.seconds)
}
When("The v2 service becomes healthy")
serviceV2.state = true
waitForHealthCheck(frontendV2)
Then("The correct order is maintained")
ping should have size 6
ping(key(dbV2)) should be < ping(key(serviceV2))
ping(key(serviceV2)) should be < ping(key(frontendV2))
WaitTestSupport.validFor("frontend v1 is available as well as all v2", 15.seconds) {
frontendV1.pingSince(2.seconds) &&
dbV2.pingSince(2.seconds) &&
serviceV2.pingSince(2.seconds) &&
frontendV2.pingSince(2.seconds)
}
When("The v2 frontend becomes healthy")
frontendV2.state = true
Then("The deployment can be finished. All v1 apps are destroyed and all v2 apps are healthy.")
waitForChange(upgrade)
List(dbV1, serviceV1, frontendV1).foreach(_.pinged = false)
WaitTestSupport.validFor("all v2 apps are alive", 15.seconds) {
!dbV1.pinged && !serviceV1.pinged && !frontendV1.pinged &&
dbV2.pingSince(2.seconds) && serviceV2.pingSince(2.seconds) && frontendV2.pingSince(2.seconds)
}
}
}
| yp-engineering/marathon | src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala | Scala | apache-2.0 | 16,525 |
/*
* Copyright (C) 2012 reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.egi
import org.openmole.core.batch.authentication.CypheredPassword
import java.io.File
object P12Certificate {
def apply(cypheredPassword: String, certificate: File = new File(new File(System.getProperty("user.home")), ".globus/certificate.p12")) =
new P12Certificate(cypheredPassword, certificate)
}
class P12Certificate(val cypheredPassword: String, val certificate: File) extends EGIAuthentication with DIRACAuthentication with CypheredPassword
| ISCPIF/PSEExperiments | openmole-src/openmole/plugins/org.openmole.plugin.environment.egi/src/main/scala/org/openmole/plugin/environment/egi/P12Certificate.scala | Scala | agpl-3.0 | 1,185 |
package intellij.haskell.cabal.lang.psi.impl
import com.intellij.psi.PsiElement
import intellij.haskell.cabal.lang.psi._
import intellij.haskell.psi.HaskellPsiUtil
trait BuildDependsImpl extends PsiElement {
/** Retrieves the package names as strings. */
def getPackageNames: Array[String] = HaskellPsiUtil.getChildOfType(this, classOf[Dependencies]) match {
case None => Array.empty
case Some(el) =>
val res =
HaskellPsiUtil.streamChildren(el, classOf[Dependency]).flatMap(c =>
HaskellPsiUtil.getChildNodes(c, CabalTypes.DEPENDENCY_NAME).headOption.map(_.getText)
).toArray
res
}
}
| rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/cabal/lang/psi/impl/BuildDependsImpl.scala | Scala | apache-2.0 | 638 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package recfun
import org.scalatest.FunSuite
import Main.balance
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BalanceSuite extends FunSuite {
test("balance: '(if (zero? x) max (/ 1 x))' is balanced") {
assert(balance("(if (zero? x) max (/ 1 x))".toList))
}
test("balance: 'I told him ...' is balanced") {
assert(balance("I told him (that it's not (yet) done).\\n(But he wasn't listening)".toList))
}
test("balance: ':-)' is unbalanced") {
assert(!balance(":-)".toList))
}
test("balance: counting is not enough") {
assert(!balance("())(".toList))
}
test("balance: '((()))()()(())' is balanced") {
assert(balance("((()))()()(())".toList))
}
}
| carstendev/Functional-Programming-in-Scala-Course | recfun/src/test/scala/recfun/BalanceSuite.scala | Scala | apache-2.0 | 1,483 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.impl
import scala.collection.JavaConverters._
import org.apache.spark.SparkFunSuite
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.ml.attribute.{AttributeGroup, NominalAttribute, NumericAttribute}
import org.apache.spark.ml.tree._
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SQLContext, DataFrame}
private[ml] object TreeTests extends SparkFunSuite {
/**
* Convert the given data to a DataFrame, and set the features and label metadata.
* @param data Dataset. Categorical features and labels must already have 0-based indices.
* This must be non-empty.
* @param categoricalFeatures Map: categorical feature index -> number of distinct values,
* i.e. featureId -> K, where the feature takes values in {0, 1, ..., K-1}
* @param numClasses Number of classes the label can take. If 0, mark as continuous.
* @return DataFrame with metadata
*/
def setMetadata(
data: RDD[LabeledPoint],
categoricalFeatures: Map[Int, Int],
numClasses: Int): DataFrame = {
val sqlContext = new SQLContext(data.sparkContext)
import sqlContext.implicits._
val df = data.toDF()
/**
+-----+-------------+
|label| features|
+-----+-------------+
| 0.0|[0.0,2.0,3.0]|
| 1.0|[0.0,3.0,1.0]|
| 0.0|[0.0,2.0,2.0]|
| 1.0|[0.0,3.0,9.0]|
| 0.0|[0.0,2.0,6.0]|
+-----+-------------+*/
df.show(5)
val numFeatures = data.first().features.size
// numFeatures is 3 for the example data above
val featuresAttributes = Range(0, numFeatures).map { feature =>
// println(">>>>"+feature)
// e.g. Map(0 -> 1); check whether this feature index is registered as categorical
if (categoricalFeatures.contains(feature)) {
// create a copy of the attribute with the updated index and number of values
NominalAttribute.defaultAttr.withIndex(feature).withNumValues(categoricalFeatures(feature))
} else {
NumericAttribute.defaultAttr.withIndex(feature)
}
}.toArray
val featuresMetadata = new AttributeGroup("features", featuresAttributes).toMetadata()
//println("===="+featuresMetadata.toString())
val labelAttribute = if (numClasses == 0) { // numClasses is the number of label classes
NumericAttribute.defaultAttr.withName("label")
} else {
NominalAttribute.defaultAttr.withName("label").withNumValues(numClasses)
}
val labelMetadata = labelAttribute.toMetadata()
/**
+-------------+-----+
| features|label|
+-------------+-----+
|[0.0,2.0,3.0]| 0.0|
|[0.0,3.0,1.0]| 1.0|
|[0.0,2.0,2.0]| 0.0|
|[0.0,3.0,9.0]| 1.0|
|[0.0,2.0,6.0]| 0.0|
+-------------+-----+*/
df.select(df("features").as("features", featuresMetadata),
df("label").as("label", labelMetadata)).show(5)
df.select(df("features").as("features", featuresMetadata),
df("label").as("label", labelMetadata))
}
/** Java-friendly version of [[setMetadata()]] */
def setMetadata(
data: JavaRDD[LabeledPoint],
categoricalFeatures: java.util.Map[java.lang.Integer, java.lang.Integer],
numClasses: Int): DataFrame = {
setMetadata(data.rdd, categoricalFeatures.asInstanceOf[java.util.Map[Int, Int]].asScala.toMap,
numClasses)
}
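// Illustrative call (values are made up): feature 0 is categorical with 2 distinct values,
// the remaining features are continuous, and the label has 2 classes.
//   val df = TreeTests.setMetadata(labeledPointRdd, categoricalFeatures = Map(0 -> 2), numClasses = 2)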
/**
* Check if the two trees are exactly the same.
* Note: I hesitate to override Node.equals since it could cause problems if users
* make mistakes such as creating loops of Nodes.
* If the trees are not equal, this prints the two trees and throws an exception.
*/
def checkEqual(a: DecisionTreeModel, b: DecisionTreeModel): Unit = {
try {
checkEqual(a.rootNode, b.rootNode)
} catch {
case ex: Exception =>
throw new AssertionError("checkEqual failed since the two trees were not identical.\\n" +
"TREE A:\\n" + a.toDebugString + "\\n" +
"TREE B:\\n" + b.toDebugString + "\\n", ex)
}
}
/**
* Return true iff the two nodes and their descendants are exactly the same.
* Note: I hesitate to override Node.equals since it could cause problems if users
* make mistakes such as creating loops of Nodes.
*/
private def checkEqual(a: Node, b: Node): Unit = {
//println(a.prediction+"\\t"+a.impurity+"\\t"+a.leftSide)
// predictions must be equal
assert(a.prediction === b.prediction)
// impurity values must be equal
assert(a.impurity === b.impurity)
(a, b) match {
case (aye: InternalNode, bee: InternalNode) =>
// println("split:"+aye.split+" leftChild:"+ aye.leftChild+" rightChild:"+aye.rightChild)
// splits must be equal
assert(aye.split === bee.split)
checkEqual(aye.leftChild, bee.leftChild)
checkEqual(aye.rightChild, bee.rightChild)
case (aye: LeafNode, bee: LeafNode) => // do nothing
case _ =>
throw new AssertionError("Found mismatched nodes")
}
}
/**
* Check if the two models are exactly the same.
* If the models are not equal, this throws an exception.
*/
def checkEqual(a: TreeEnsembleModel, b: TreeEnsembleModel): Unit = {
try {
a.trees.zip(b.trees).foreach { case (treeA, treeB) =>
TreeTests.checkEqual(treeA, treeB)
}
// tree weights must be equal
assert(a.treeWeights === b.treeWeights)
} catch {
case ex: Exception => throw new AssertionError(
"checkEqual failed since the two tree ensembles were not identical")
}
}
/**
* Helper method for constructing a tree for testing.
* Given left, right children, construct a parent node.
* @param split Split for parent node
* @return Parent node with children attached
*/
def buildParentNode(left: Node, right: Node, split: Split): Node = {
val leftImp = left.impurityStats
val rightImp = right.impurityStats
val parentImp = leftImp.copy.add(rightImp)
val leftWeight = leftImp.count / parentImp.count.toDouble
val rightWeight = rightImp.count / parentImp.count.toDouble
val gain = parentImp.calculate() -
(leftWeight * leftImp.calculate() + rightWeight * rightImp.calculate())
val pred = parentImp.predict
new InternalNode(pred, parentImp.calculate(), gain, left, right, split, parentImp)
}
}
| tophua/spark1.52 | mllib/src/test/scala/org/apache/spark/ml/impl/TreeTests.scala | Scala | apache-2.0 | 7,574 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg.batch
import org.apache.flink.streaming.api.operators.OneInputStreamOperator
import org.apache.flink.table.dataformat.{BaseRow, BinaryRow, GenericRow, JoinedRow}
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.planner.codegen.{CodeGenUtils, CodeGeneratorContext, ProjectionCodeGenerator}
import org.apache.flink.table.planner.functions.aggfunctions.DeclarativeAggregateFunction
import org.apache.flink.table.planner.plan.utils.AggregateInfoList
import org.apache.flink.table.runtime.generated.GeneratedOperator
import org.apache.flink.table.runtime.operators.TableStreamOperator
import org.apache.flink.table.runtime.operators.aggregate.{BytesHashMap, BytesHashMapSpillMemorySegmentPool}
import org.apache.flink.table.types.logical.RowType
import org.apache.calcite.tools.RelBuilder
/**
* Operator code generator for hash aggregation. It only deals with
* [[DeclarativeAggregateFunction]]s, and aggregate buffers are updated in place
* (e.g. via setInt) in a [[BinaryRow]].
* (Hash aggregate performs much better than sort aggregate.)
*/
class HashAggCodeGenerator(
ctx: CodeGeneratorContext,
builder: RelBuilder,
aggInfoList: AggregateInfoList,
inputType: RowType,
outputType: RowType,
grouping: Array[Int],
auxGrouping: Array[Int],
isMerge: Boolean,
isFinal: Boolean) {
private lazy val groupKeyRowType = AggCodeGenHelper.projectRowType(inputType, grouping)
private lazy val aggCallToAggFunction =
aggInfoList.aggInfos.map(info => (info.agg, info.function))
private lazy val aggregates: Seq[UserDefinedFunction] = aggInfoList.aggInfos.map(_.function)
private lazy val aggArgs: Array[Array[Int]] = aggInfoList.aggInfos.map(_.argIndexes)
// get udagg instance names
private lazy val udaggs = AggCodeGenHelper.getUdaggs(aggregates)
// currently put auxGrouping to aggBuffer in code-gen
private lazy val aggBufferNames = AggCodeGenHelper.getAggBufferNames(auxGrouping, aggregates)
private lazy val aggBufferTypes =
AggCodeGenHelper.getAggBufferTypes(inputType, auxGrouping, aggregates)
private lazy val aggBufferRowType = RowType.of(aggBufferTypes.flatten, aggBufferNames.flatten)
def genWithKeys(): GeneratedOperator[OneInputStreamOperator[BaseRow, BaseRow]] = {
val inputTerm = CodeGenUtils.DEFAULT_INPUT1_TERM
val className = if (isFinal) "HashAggregateWithKeys" else "LocalHashAggregateWithKeys"
// add logger
val logTerm = CodeGenUtils.newName("LOG")
ctx.addReusableLogger(logTerm, className)
// gen code to do group key projection from input
val currentKeyTerm = CodeGenUtils.newName("currentKey")
val currentKeyWriterTerm = CodeGenUtils.newName("currentKeyWriter")
val keyProjectionCode = ProjectionCodeGenerator.generateProjectionExpression(
ctx,
inputType,
groupKeyRowType,
grouping,
inputTerm = inputTerm,
outRecordTerm = currentKeyTerm,
outRecordWriterTerm = currentKeyWriterTerm).code
// gen code to create groupKey, aggBuffer Type array
// it will be used in BytesHashMap and BufferedKVExternalSorter if enable fallback
val groupKeyTypesTerm = CodeGenUtils.newName("groupKeyTypes")
val aggBufferTypesTerm = CodeGenUtils.newName("aggBufferTypes")
HashAggCodeGenHelper.prepareHashAggKVTypes(
ctx, groupKeyTypesTerm, aggBufferTypesTerm, groupKeyRowType, aggBufferRowType)
// gen code to aggregate and output using hash map
val aggregateMapTerm = CodeGenUtils.newName("aggregateMap")
val lookupInfo = ctx.addReusableLocalVariable(
classOf[BytesHashMap.LookupInfo].getCanonicalName,
"lookupInfo")
HashAggCodeGenHelper.prepareHashAggMap(
ctx,
groupKeyTypesTerm,
aggBufferTypesTerm,
aggregateMapTerm)
val outputTerm = CodeGenUtils.newName("hashAggOutput")
val (reuseAggMapEntryTerm, reuseGroupKeyTerm, reuseAggBufferTerm) =
HashAggCodeGenHelper.prepareTermForAggMapIteration(
ctx,
outputTerm,
outputType,
groupKeyRowType,
aggBufferRowType,
if (grouping.isEmpty) classOf[GenericRow] else classOf[JoinedRow])
val currentAggBufferTerm = ctx.addReusableLocalVariable(
classOf[BinaryRow].getName, "currentAggBuffer")
val (initedAggBuffer, aggregate, outputExpr) = HashAggCodeGenHelper.genHashAggCodes(
isMerge,
isFinal,
ctx,
builder,
(grouping, auxGrouping),
inputTerm,
inputType,
aggCallToAggFunction,
aggArgs,
aggregates,
currentAggBufferTerm,
aggBufferRowType,
aggBufferTypes,
outputTerm,
outputType,
reuseGroupKeyTerm,
reuseAggBufferTerm)
val outputResultFromMap = HashAggCodeGenHelper.genAggMapIterationAndOutput(
ctx, isFinal, aggregateMapTerm, reuseAggMapEntryTerm, reuseAggBufferTerm, outputExpr)
// gen code to deal with hash map oom, if enable fallback we will use sort agg strategy
val sorterTerm = CodeGenUtils.newName("sorter")
val retryAppend = HashAggCodeGenHelper.genRetryAppendToMap(
aggregateMapTerm, currentKeyTerm, initedAggBuffer, lookupInfo, currentAggBufferTerm)
val (dealWithAggHashMapOOM, fallbackToSortAggCode) = HashAggCodeGenHelper.genAggMapOOMHandling(
isFinal,
ctx,
builder,
(grouping, auxGrouping),
aggCallToAggFunction,
aggArgs,
aggInfoList.aggInfos.map(_.externalResultType),
udaggs,
logTerm,
aggregateMapTerm,
(groupKeyTypesTerm, aggBufferTypesTerm),
(groupKeyRowType, aggBufferRowType),
aggBufferNames,
aggBufferTypes,
outputTerm,
outputType,
outputResultFromMap,
sorterTerm,
retryAppend)
HashAggCodeGenHelper.prepareMetrics(ctx, aggregateMapTerm, if (isFinal) sorterTerm else null)
val lazyInitAggBufferCode = if (auxGrouping.nonEmpty) {
s"""
|// lazy init agg buffer (with auxGrouping)
|${initedAggBuffer.code}
""".stripMargin
} else {
""
}
val processCode =
s"""
| // input field access for group key projection and aggregate buffer update
|${ctx.reuseInputUnboxingCode(inputTerm)}
| // project key from input
|$keyProjectionCode
| // look up output buffer using current group key
|$lookupInfo = $aggregateMapTerm.lookup($currentKeyTerm);
|$currentAggBufferTerm = $lookupInfo.getValue();
|
|if (!$lookupInfo.isFound()) {
| $lazyInitAggBufferCode
| // append empty agg buffer into aggregate map for current group key
| try {
| $currentAggBufferTerm =
| $aggregateMapTerm.append($lookupInfo, ${initedAggBuffer.resultTerm});
| } catch (java.io.EOFException exp) {
| $dealWithAggHashMapOOM
| }
|}
| // aggregate buffer fields access
|${ctx.reuseInputUnboxingCode(currentAggBufferTerm)}
| // do aggregate and update agg buffer
|${aggregate.code}
|""".stripMargin.trim
val endInputCode = if (isFinal) {
val memPoolTypeTerm = classOf[BytesHashMapSpillMemorySegmentPool].getName
s"""
|if ($sorterTerm == null) {
| // no spilling, output by iterating aggregate map.
| $outputResultFromMap
|} else {
| // spill last part of input' aggregation output buffer
| $sorterTerm.sortAndSpill(
| $aggregateMapTerm.getRecordAreaMemorySegments(),
| $aggregateMapTerm.getNumElements(),
| new $memPoolTypeTerm($aggregateMapTerm.getBucketAreaMemorySegments()));
| // only release floating memory in advance.
| $aggregateMapTerm.free(true);
| // fall back to sort based aggregation
| $fallbackToSortAggCode
|}
""".stripMargin
} else {
s"$outputResultFromMap"
}
AggCodeGenHelper.generateOperator(
ctx,
className,
classOf[TableStreamOperator[BaseRow]].getCanonicalName,
processCode,
endInputCode,
inputType)
}
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/batch/HashAggCodeGenerator.scala | Scala | apache-2.0 | 8,983 |
package com.softwaremill.example
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.StatusCodes._
import com.softwaremill.session._
import com.softwaremill.session.CsrfDirectives._
import com.softwaremill.session.CsrfOptions._
import com.softwaremill.session.SessionDirectives._
import com.softwaremill.session.SessionOptions._
import com.typesafe.scalalogging.slf4j.StrictLogging
import scala.io.StdIn
import scala.util.Try
object Example extends App with StrictLogging {
implicit val system = ActorSystem("example")
implicit val materializer = ActorMaterializer()
import system.dispatcher
val sessionConfig = SessionConfig.default("c05ll3lesrinf39t7mc5h6un6r0c69lgfno69dsak3vabeqamouq4328cuaekros401ajdpkh60rrtpd8ro24rbuqmgtnd1ebag6ljnb65i8a55d482ok7o0nch0bfbe")
implicit val sessionManager = new SessionManager[ExampleSession](sessionConfig)
implicit val refreshTokenStorage = new InMemoryRefreshTokenStorage[ExampleSession] {
def log(msg: String) = logger.info(msg)
}
def mySetSession(v: ExampleSession) = setSession(refreshable, usingHeaders, v)
val myRequiredSession = requiredSession(refreshable, usingHeaders)
val myOptionalSession = optionalSession(refreshable, usingHeaders)
val myInvalidateSession = invalidateSession(refreshable, usingHeaders)
val routes =
path("") {
redirect("/app/campaigns", SeeOther)
} ~
pathPrefix("app") {
get {
getFromFile("public/index.html")
}
} ~
pathPrefix("api") {
path("do_login") {
post {
entity(as[String]) { body =>
logger.info(s"Logging in $body")
mySetSession(ExampleSession(body)) {
ctx => ctx.complete("ok")
}
}
}
} ~
// This should be protected and accessible only when logged in
path("do_logout") {
post {
myRequiredSession { session =>
myInvalidateSession { ctx =>
logger.info(s"Logging out $session")
ctx.complete("ok")
}
}
}
} ~
// This should be protected and accessible only when logged in
path("current_login") {
get {
myRequiredSession { session => ctx =>
logger.info("Current session: " + session)
ctx.complete(session.username)
}
}
}
} ~
get {
getFromDirectory("public/")
}
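  // The CsrfDirectives/CsrfOptions imports above are not exercised by these routes. A minimal,
  // hedged sketch of applying them (using the imported API) would wrap the routes, e.g.
  //   val csrfProtectedRoutes = randomTokenCsrfProtection(checkHeader) { routes }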
val bindingFuture = Http().bindAndHandle(routes, "localhost", 8080)
println("Server started, press enter to stop. Visit http://localhost:8080 to see the demo.")
StdIn.readLine()
import system.dispatcher
bindingFuture
.flatMap(_.unbind())
.onComplete { _ =>
system.shutdown()
println("Server stopped")
}
}
case class ExampleSession(username: String)
object ExampleSession {
implicit def serializer: SessionSerializer[ExampleSession, String] = new SingleValueSessionSerializer(
_.username,
(un: String) => Try { ExampleSession(un) }
)
}
| ilyai/akka-http-session | example/src/main/scala/com/softwaremill/example/Example.scala | Scala | apache-2.0 | 3,245
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io.{BufferedOutputStream, File, FileOutputStream, OutputStream}
import java.nio.channels.FileChannel
import org.apache.spark.executor.ShuffleWriteMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.serializer.{SerializationStream, SerializerInstance}
import org.apache.spark.util.Utils
/**
* A class for writing JVM objects directly to a file on disk. This class allows data to be appended
* to an existing block. For efficiency, it retains the underlying file channel across
* multiple commits. This channel is kept open until close() is called. In case of faults,
* callers should instead close with revertPartialWritesAndClose() to atomically revert the
* uncommitted partial writes.
*
 * This class does not support concurrent writes. Also, once the writer has been opened, it
 * cannot be reopened.
*/
private[spark] class DiskBlockObjectWriter(
val file: File,
serializerInstance: SerializerInstance,
bufferSize: Int,
wrapStream: OutputStream => OutputStream,
syncWrites: Boolean,
// These write metrics concurrently shared with other active DiskBlockObjectWriters who
// are themselves performing writes. All updates must be relative.
writeMetrics: ShuffleWriteMetrics,
val blockId: BlockId = null)
extends OutputStream
with Logging {
/**
* Guards against close calls, e.g. from a wrapping stream.
* Call manualClose to close the stream that was extended by this trait.
* Commit uses this trait to close object streams without paying the
* cost of closing and opening the underlying file.
*/
private trait ManualCloseOutputStream extends OutputStream {
abstract override def close(): Unit = {
flush()
}
def manualClose(): Unit = {
super.close()
}
}
/** The file channel, used for repositioning / truncating the file. */
private var channel: FileChannel = null
private var mcs: ManualCloseOutputStream = null
private var bs: OutputStream = null
private var fos: FileOutputStream = null
private var ts: TimeTrackingOutputStream = null
private var objOut: SerializationStream = null
private var initialized = false
private var streamOpen = false
private var hasBeenClosed = false
/**
* Cursors used to represent positions in the file.
*
* xxxxxxxxxx|----------|-----|
* ^ ^ ^
* | | channel.position()
* | reportedPosition
* committedPosition
*
* reportedPosition: Position at the time of the last update to the write metrics.
* committedPosition: Offset after last committed write.
* -----: Current writes to the underlying file.
* xxxxx: Committed contents of the file.
*/
private var committedPosition = file.length()
private var reportedPosition = committedPosition
/**
* Keep track of number of records written and also use this to periodically
* output bytes written since the latter is expensive to do for each record.
*/
private var numRecordsWritten = 0
private def initialize(): Unit = {
fos = new FileOutputStream(file, true)
channel = fos.getChannel()
ts = new TimeTrackingOutputStream(writeMetrics, fos)
class ManualCloseBufferedOutputStream
extends BufferedOutputStream(ts, bufferSize) with ManualCloseOutputStream
mcs = new ManualCloseBufferedOutputStream
}
def open(): DiskBlockObjectWriter = {
if (hasBeenClosed) {
throw new IllegalStateException("Writer already closed. Cannot be reopened.")
}
if (!initialized) {
initialize()
initialized = true
}
bs = wrapStream(mcs)
objOut = serializerInstance.serializeStream(bs)
streamOpen = true
this
}
/**
* Close and cleanup all resources.
* Should call after committing or reverting partial writes.
*/
private def closeResources(): Unit = {
if (initialized) {
mcs.manualClose()
channel = null
mcs = null
bs = null
fos = null
ts = null
objOut = null
initialized = false
streamOpen = false
hasBeenClosed = true
}
}
/**
* Commits any remaining partial writes and closes resources.
*/
override def close() {
if (initialized) {
Utils.tryWithSafeFinally {
commitAndGet()
} {
closeResources()
}
}
}
/**
* Flush the partial writes and commit them as a single atomic block.
* A commit may write additional bytes to frame the atomic block.
*
* @return file segment with previous offset and length committed on this call.
*/
def commitAndGet(): FileSegment = {
if (streamOpen) {
// NOTE: Because Kryo doesn't flush the underlying stream we explicitly flush both the
// serializer stream and the lower level stream.
objOut.flush()
bs.flush()
objOut.close()
streamOpen = false
if (syncWrites) {
// Force outstanding writes to disk and track how long it takes
val start = System.nanoTime()
fos.getFD.sync()
writeMetrics.incWriteTime(System.nanoTime() - start)
}
val pos = channel.position()
val fileSegment = new FileSegment(file, committedPosition, pos - committedPosition)
committedPosition = pos
// In certain compression codecs, more bytes are written after streams are closed
writeMetrics.incBytesWritten(committedPosition - reportedPosition)
reportedPosition = committedPosition
fileSegment
} else {
new FileSegment(file, committedPosition, 0)
}
}
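  // Minimal usage sketch, restricted to the API defined in this file:
  //   writer.write(key, value)                 // repeated once per record
  //   val segment = writer.commitAndGet()      // FileSegment covering the records just written
  //   writer.close()                           // or revertPartialWritesAndClose() on failure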
/**
* Reverts writes that haven't been committed yet. Callers should invoke this function
* when there are runtime exceptions. This method will not throw, though it may be
* unsuccessful in truncating written data.
*
* @return the file that this DiskBlockObjectWriter wrote to.
*/
def revertPartialWritesAndClose(): File = {
// Discard current writes. We do this by flushing the outstanding writes and then
// truncating the file to its initial position.
try {
if (initialized) {
writeMetrics.decBytesWritten(reportedPosition - committedPosition)
writeMetrics.decRecordsWritten(numRecordsWritten)
streamOpen = false
closeResources()
}
val truncateStream = new FileOutputStream(file, true)
try {
truncateStream.getChannel.truncate(committedPosition)
file
} finally {
truncateStream.close()
}
} catch {
case e: Exception =>
logError("Uncaught exception while reverting partial writes to file " + file, e)
file
}
}
/**
* Writes a key-value pair.
*/
def write(key: Any, value: Any) {
if (!streamOpen) {
open()
}
objOut.writeKey(key)
objOut.writeValue(value)
recordWritten()
}
override def write(b: Int): Unit = throw new UnsupportedOperationException()
override def write(kvBytes: Array[Byte], offs: Int, len: Int): Unit = {
if (!streamOpen) {
open()
}
bs.write(kvBytes, offs, len)
}
/**
* Notify the writer that a record worth of bytes has been written with OutputStream#write.
*/
def recordWritten(): Unit = {
numRecordsWritten += 1
writeMetrics.incRecordsWritten(1)
if (numRecordsWritten % 16384 == 0) {
updateBytesWritten()
}
}
/**
* Report the number of bytes written in this writer's shuffle write metrics.
* Note that this is only valid before the underlying streams are closed.
*/
private def updateBytesWritten() {
val pos = channel.position()
writeMetrics.incBytesWritten(pos - reportedPosition)
reportedPosition = pos
}
// For testing
private[spark] override def flush() {
objOut.flush()
bs.flush()
}
}
| likithkailas/StreamingSystems | core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala | Scala | apache-2.0 | 8,641 |
package com.pragmasoft.eventaggregator.streams
import akka.kafka.scaladsl.Consumer
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.scaladsl.Source
import com.pragmasoft.eventaggregator.ActorSystemProvider
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.ByteArrayDeserializer
case class KafkaPublisherConfig(reactiveKafkaDispatcher: String, bootstrapBrokers: String, topicRegex: String, groupId: String, readFromBeginning: Boolean)
trait SourceProvider[T, Mat] {
def source: Source[T, Mat]
}
trait KafkaSourceProvider extends SourceProvider[ConsumerRecord[Array[Byte], Array[Byte]], Control] with LazyLogging {
self: ActorSystemProvider =>
def kafkaConfig: KafkaPublisherConfig
lazy val consumerProperties = {
ConsumerSettings(actorSystem, new ByteArrayDeserializer, new ByteArrayDeserializer)
.withBootstrapServers(kafkaConfig.bootstrapBrokers)
.withGroupId(kafkaConfig.groupId)
.withProperty(
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
if (kafkaConfig.readFromBeginning)
"earliest"
else
"latest"
)
.withProperty(
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
"true"
)
.withDispatcher(kafkaConfig.reactiveKafkaDispatcher)
}
override lazy val source: Source[ConsumerRecord[Array[Byte], Array[Byte]], Control] =
Consumer.atMostOnceSource(consumerProperties, Subscriptions.topicPattern(kafkaConfig.topicRegex))
}
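// Minimal wiring sketch (hypothetical class and value names): a concrete component supplies the
// ActorSystem and config by mixing the traits together, and the resulting `source` can then be
// materialised like any other Akka Streams source, e.g.
//   class EventsSource(val actorSystem: ActorSystem, val kafkaConfig: KafkaPublisherConfig)
//     extends KafkaSourceProvider with ActorSystemProvider
//   new EventsSource(system, config).source.runWith(Sink.foreach(r => println(r.value())))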
| galarragas/event-aggregator | src/main/scala/com/pragmasoft/eventaggregator/streams/KafkaSourceProvider.scala | Scala | apache-2.0 | 1,616 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.security
/**
* Default implementation of the AuthorizationsProvider that doesn't provide any authorizations
*/
class DefaultAuthorizationsProvider extends AuthorizationsProvider {
private var authorizations: java.util.List[String] = new java.util.ArrayList[String]()
override def getAuthorizations: java.util.List[String] = authorizations
override def configure(params: java.util.Map[String, java.io.Serializable]) {
val authString = AuthsParam.lookup(params)
if (authString == null || authString.isEmpty) {
authorizations = new java.util.ArrayList[String]()
} else {
authorizations = java.util.Arrays.asList(authString.split(","): _*)
}
}
}
| elahrvivaz/geomesa | geomesa-security/src/main/scala/org/locationtech/geomesa/security/DefaultAuthorizationsProvider.scala | Scala | apache-2.0 | 1,183 |
// scalac: -Xlint:infer-any -Xfatal-warnings
//
trait Test {
type R = PartialFunction[Any, Unit]
val x: R = { case "" => }
val y: R = { case "" => }
val z: R = x orElse y
val zz = x orElse y
}
| scala/scala | test/files/pos/t8861.scala | Scala | apache-2.0 | 209 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, CharArrayWriter, InputStreamReader, StringWriter}
import scala.util.parsing.combinator.RegexParsers
import com.fasterxml.jackson.core._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.json._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, BadRecordException, FailFastMode, GenericArrayData, MapData}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
private[this] sealed trait PathInstruction
private[this] object PathInstruction {
private[expressions] case object Subscript extends PathInstruction
private[expressions] case object Wildcard extends PathInstruction
private[expressions] case object Key extends PathInstruction
private[expressions] case class Index(index: Long) extends PathInstruction
private[expressions] case class Named(name: String) extends PathInstruction
}
private[this] sealed trait WriteStyle
private[this] object WriteStyle {
private[expressions] case object RawStyle extends WriteStyle
private[expressions] case object QuotedStyle extends WriteStyle
private[expressions] case object FlattenStyle extends WriteStyle
}
private[this] object JsonPathParser extends RegexParsers {
import PathInstruction._
def root: Parser[Char] = '$'
def long: Parser[Long] = "\\\\d+".r ^? {
case x => x.toLong
}
// parse `[*]` and `[123]` subscripts
def subscript: Parser[List[PathInstruction]] =
for {
operand <- '[' ~> ('*' ^^^ Wildcard | long ^^ Index) <~ ']'
} yield {
Subscript :: operand :: Nil
}
// parse `.name` or `['name']` child expressions
def named: Parser[List[PathInstruction]] =
for {
name <- '.' ~> "[^\\\\.\\\\[]+".r | "['" ~> "[^\\\\'\\\\?]+".r <~ "']"
} yield {
Key :: Named(name) :: Nil
}
// child wildcards: `..`, `.*` or `['*']`
def wildcard: Parser[List[PathInstruction]] =
(".*" | "['*']") ^^^ List(Wildcard)
def node: Parser[List[PathInstruction]] =
wildcard |
named |
subscript
val expression: Parser[List[PathInstruction]] = {
phrase(root ~> rep(node) ^^ (x => x.flatten))
}
def parse(str: String): Option[List[PathInstruction]] = {
this.parseAll(expression, str) match {
case Success(result, _) =>
Some(result)
case NoSuccess(msg, next) =>
None
}
}
}
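// Worked example of the grammar above:
//   JsonPathParser.parse("$.a[0].b")
//     == Some(List(Key, Named("a"), Subscript, Index(0), Key, Named("b")))
// whereas an input that does not start with '$' (e.g. "a.b") fails to parse and yields None.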
private[this] object SharedFactory {
val jsonFactory = new JsonFactory()
// Enabled for Hive compatibility
jsonFactory.enable(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS)
}
/**
* Extracts json object from a json string based on json path specified, and returns json string
* of the extracted json object. It will return null if the input json string is invalid.
*/
@ExpressionDescription(
usage = "_FUNC_(json_txt, path) - Extracts a json object from `path`.",
examples = """
Examples:
> SELECT _FUNC_('{"a":"b"}', '$.a');
b
""")
case class GetJsonObject(json: Expression, path: Expression)
extends BinaryExpression with ExpectsInputTypes with CodegenFallback {
import com.fasterxml.jackson.core.JsonToken._
import PathInstruction._
import SharedFactory._
import WriteStyle._
override def left: Expression = json
override def right: Expression = path
override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
override def dataType: DataType = StringType
override def nullable: Boolean = true
override def prettyName: String = "get_json_object"
@transient private lazy val parsedPath = parsePath(path.eval().asInstanceOf[UTF8String])
override def eval(input: InternalRow): Any = {
val jsonStr = json.eval(input).asInstanceOf[UTF8String]
if (jsonStr == null) {
return null
}
val parsed = if (path.foldable) {
parsedPath
} else {
parsePath(path.eval(input).asInstanceOf[UTF8String])
}
if (parsed.isDefined) {
try {
/* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
detect character encoding which could fail for some malformed strings */
Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, jsonStr)) { parser =>
val output = new ByteArrayOutputStream()
val matched = Utils.tryWithResource(
jsonFactory.createGenerator(output, JsonEncoding.UTF8)) { generator =>
parser.nextToken()
evaluatePath(parser, generator, RawStyle, parsed.get)
}
if (matched) {
UTF8String.fromBytes(output.toByteArray)
} else {
null
}
}
} catch {
case _: JsonProcessingException => null
}
} else {
null
}
}
private def parsePath(path: UTF8String): Option[List[PathInstruction]] = {
if (path != null) {
JsonPathParser.parse(path.toString)
} else {
None
}
}
  // advance to the desired array index; assumes the parser is positioned at the START_ARRAY token
private def arrayIndex(p: JsonParser, f: () => Boolean): Long => Boolean = {
case _ if p.getCurrentToken == END_ARRAY =>
// terminate, nothing has been written
false
case 0 =>
// we've reached the desired index
val dirty = f()
while (p.nextToken() != END_ARRAY) {
// advance the token stream to the end of the array
p.skipChildren()
}
dirty
case i if i > 0 =>
// skip this token and evaluate the next
p.skipChildren()
p.nextToken()
arrayIndex(p, f)(i - 1)
}
/**
* Evaluate a list of JsonPath instructions, returning a bool that indicates if any leaf nodes
* have been written to the generator
*/
private def evaluatePath(
p: JsonParser,
g: JsonGenerator,
style: WriteStyle,
path: List[PathInstruction]): Boolean = {
(p.getCurrentToken, path) match {
case (VALUE_STRING, Nil) if style == RawStyle =>
// there is no array wildcard or slice parent, emit this string without quotes
if (p.hasTextCharacters) {
g.writeRaw(p.getTextCharacters, p.getTextOffset, p.getTextLength)
} else {
g.writeRaw(p.getText)
}
true
case (START_ARRAY, Nil) if style == FlattenStyle =>
// flatten this array into the parent
var dirty = false
while (p.nextToken() != END_ARRAY) {
dirty |= evaluatePath(p, g, style, Nil)
}
dirty
case (_, Nil) =>
// general case: just copy the child tree verbatim
g.copyCurrentStructure(p)
true
case (START_OBJECT, Key :: xs) =>
var dirty = false
while (p.nextToken() != END_OBJECT) {
if (dirty) {
// once a match has been found we can skip other fields
p.skipChildren()
} else {
dirty = evaluatePath(p, g, style, xs)
}
}
dirty
case (START_ARRAY, Subscript :: Wildcard :: Subscript :: Wildcard :: xs) =>
// special handling for the non-structure preserving double wildcard behavior in Hive
var dirty = false
g.writeStartArray()
while (p.nextToken() != END_ARRAY) {
dirty |= evaluatePath(p, g, FlattenStyle, xs)
}
g.writeEndArray()
dirty
case (START_ARRAY, Subscript :: Wildcard :: xs) if style != QuotedStyle =>
// retain Flatten, otherwise use Quoted... cannot use Raw within an array
val nextStyle = style match {
case RawStyle => QuotedStyle
case FlattenStyle => FlattenStyle
case QuotedStyle => throw new IllegalStateException()
}
// temporarily buffer child matches, the emitted json will need to be
// modified slightly if there is only a single element written
val buffer = new StringWriter()
var dirty = 0
Utils.tryWithResource(jsonFactory.createGenerator(buffer)) { flattenGenerator =>
flattenGenerator.writeStartArray()
while (p.nextToken() != END_ARRAY) {
// track the number of array elements and only emit an outer array if
// we've written more than one element, this matches Hive's behavior
dirty += (if (evaluatePath(p, flattenGenerator, nextStyle, xs)) 1 else 0)
}
flattenGenerator.writeEndArray()
}
val buf = buffer.getBuffer
if (dirty > 1) {
g.writeRawValue(buf.toString)
} else if (dirty == 1) {
// remove outer array tokens
g.writeRawValue(buf.substring(1, buf.length()-1))
} // else do not write anything
dirty > 0
case (START_ARRAY, Subscript :: Wildcard :: xs) =>
var dirty = false
g.writeStartArray()
while (p.nextToken() != END_ARRAY) {
          // wildcards can have multiple matches, continually update the dirty flag
dirty |= evaluatePath(p, g, QuotedStyle, xs)
}
g.writeEndArray()
dirty
case (START_ARRAY, Subscript :: Index(idx) :: (xs@Subscript :: Wildcard :: _)) =>
p.nextToken()
// we're going to have 1 or more results, switch to QuotedStyle
arrayIndex(p, () => evaluatePath(p, g, QuotedStyle, xs))(idx)
case (START_ARRAY, Subscript :: Index(idx) :: xs) =>
p.nextToken()
arrayIndex(p, () => evaluatePath(p, g, style, xs))(idx)
case (FIELD_NAME, Named(name) :: xs) if p.getCurrentName == name =>
// exact field match
if (p.nextToken() != JsonToken.VALUE_NULL) {
evaluatePath(p, g, style, xs)
} else {
false
}
case (FIELD_NAME, Wildcard :: xs) =>
// wildcard field match
p.nextToken()
evaluatePath(p, g, style, xs)
case _ =>
p.skipChildren()
false
}
}
}
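// Worked example of the path evaluation above, for json = {"a":[{"b":1},{"b":2}]}:
//   get_json_object(json, '$.a[1].b')  returns the string "2"
//   get_json_object(json, '$.a[*].b')  returns "[1,2]" (multiple wildcard matches are wrapped in
//                                      an array; a single match would be emitted bare)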
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(jsonStr, p1, p2, ..., pn) - Returns a tuple like the function get_json_object, but it takes multiple names. All the input parameters and output column types are string.",
examples = """
Examples:
> SELECT _FUNC_('{"a":1, "b":2}', 'a', 'b');
1 2
""")
// scalastyle:on line.size.limit
case class JsonTuple(children: Seq[Expression])
extends Generator with CodegenFallback {
import SharedFactory._
override def nullable: Boolean = {
// a row is always returned
false
}
// if processing fails this shared value will be returned
@transient private lazy val nullRow: Seq[InternalRow] =
new GenericInternalRow(Array.ofDim[Any](fieldExpressions.length)) :: Nil
// the json body is the first child
@transient private lazy val jsonExpr: Expression = children.head
// the fields to query are the remaining children
@transient private lazy val fieldExpressions: Seq[Expression] = children.tail
  // eagerly evaluate any foldable field names
@transient private lazy val foldableFieldNames: IndexedSeq[Option[String]] = {
fieldExpressions.map {
case expr if expr.foldable => Option(expr.eval()).map(_.asInstanceOf[UTF8String].toString)
case _ => null
}.toIndexedSeq
}
// and count the number of foldable fields, we'll use this later to optimize evaluation
@transient private lazy val constantFields: Int = foldableFieldNames.count(_ != null)
override def elementSchema: StructType = StructType(fieldExpressions.zipWithIndex.map {
case (_, idx) => StructField(s"c$idx", StringType, nullable = true)
})
override def prettyName: String = "json_tuple"
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length < 2) {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires at least two arguments")
} else if (children.forall(child => StringType.acceptsType(child.dataType))) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires that all arguments are strings")
}
}
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val json = jsonExpr.eval(input).asInstanceOf[UTF8String]
if (json == null) {
return nullRow
}
try {
/* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
detect character encoding which could fail for some malformed strings */
Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, json)) { parser =>
parseRow(parser, input)
}
} catch {
case _: JsonProcessingException =>
nullRow
}
}
private def parseRow(parser: JsonParser, input: InternalRow): Seq[InternalRow] = {
// only objects are supported
if (parser.nextToken() != JsonToken.START_OBJECT) {
return nullRow
}
// evaluate the field names as String rather than UTF8String to
// optimize lookups from the json token, which is also a String
val fieldNames = if (constantFields == fieldExpressions.length) {
// typically the user will provide the field names as foldable expressions
// so we can use the cached copy
foldableFieldNames.map(_.orNull)
} else if (constantFields == 0) {
// none are foldable so all field names need to be evaluated from the input row
fieldExpressions.map(_.eval(input).asInstanceOf[UTF8String].toString)
} else {
// if there is a mix of constant and non-constant expressions
// prefer the cached copy when available
foldableFieldNames.zip(fieldExpressions).map {
case (null, expr) => expr.eval(input).asInstanceOf[UTF8String].toString
case (fieldName, _) => fieldName.orNull
}
}
val row = Array.ofDim[Any](fieldNames.length)
// start reading through the token stream, looking for any requested field names
while (parser.nextToken() != JsonToken.END_OBJECT) {
if (parser.getCurrentToken == JsonToken.FIELD_NAME) {
// check to see if this field is desired in the output
val jsonField = parser.getCurrentName
var idx = fieldNames.indexOf(jsonField)
if (idx >= 0) {
// it is, copy the child tree to the correct location in the output row
val output = new ByteArrayOutputStream()
// write the output directly to UTF8 encoded byte array
if (parser.nextToken() != JsonToken.VALUE_NULL) {
Utils.tryWithResource(jsonFactory.createGenerator(output, JsonEncoding.UTF8)) {
generator => copyCurrentStructure(generator, parser)
}
val jsonValue = UTF8String.fromBytes(output.toByteArray)
// SPARK-21804: json_tuple returns null values within repeated columns
            // except the first one, so we need to check the remaining fields.
do {
row(idx) = jsonValue
idx = fieldNames.indexOf(jsonField, idx + 1)
} while (idx >= 0)
}
}
}
// always skip children, it's cheap enough to do even if copyCurrentStructure was called
parser.skipChildren()
}
new GenericInternalRow(row) :: Nil
}
private def copyCurrentStructure(generator: JsonGenerator, parser: JsonParser): Unit = {
parser.getCurrentToken match {
// if the user requests a string field it needs to be returned without enclosing
// quotes which is accomplished via JsonGenerator.writeRaw instead of JsonGenerator.write
case JsonToken.VALUE_STRING if parser.hasTextCharacters =>
// slight optimization to avoid allocating a String instance, though the characters
// still have to be decoded... Jackson doesn't have a way to access the raw bytes
generator.writeRaw(parser.getTextCharacters, parser.getTextOffset, parser.getTextLength)
case JsonToken.VALUE_STRING =>
// the normal String case, pass it through to the output without enclosing quotes
generator.writeRaw(parser.getText)
case JsonToken.VALUE_NULL =>
// a special case that needs to be handled outside of this method.
// if a requested field is null, the result must be null. the easiest
// way to achieve this is just by ignoring null tokens entirely
throw new IllegalStateException("Do not attempt to copy a null field")
case _ =>
// handle other types including objects, arrays, booleans and numbers
generator.copyCurrentStructure(parser)
}
}
}
/**
* Converts an json input string to a [[StructType]] or [[ArrayType]] of [[StructType]]s
* with the specified schema.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.",
examples = """
Examples:
> SELECT _FUNC_('{"a":1, "b":0.8}', 'a INT, b DOUBLE');
{"a":1, "b":0.8}
> SELECT _FUNC_('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'));
{"time":"2015-08-26 00:00:00.0"}
""",
since = "2.2.0")
// scalastyle:on line.size.limit
case class JsonToStructs(
schema: DataType,
options: Map[String, String],
child: Expression,
timeZoneId: Option[String],
forceNullableSchema: Boolean)
extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {
// The JSON input data might be missing certain fields. We force the nullability
// of the user-provided schema to avoid data corruptions. In particular, the parquet-mr encoder
// can generate incorrect files if values are missing in columns declared as non-nullable.
val nullableSchema = if (forceNullableSchema) schema.asNullable else schema
override def nullable: Boolean = true
// Used in `FunctionRegistry`
def this(child: Expression, schema: Expression) =
this(
schema = JsonExprUtils.validateSchemaLiteral(schema),
options = Map.empty[String, String],
child = child,
timeZoneId = None,
forceNullableSchema = SQLConf.get.getConf(SQLConf.FROM_JSON_FORCE_NULLABLE_SCHEMA))
def this(child: Expression, schema: Expression, options: Expression) =
this(
schema = JsonExprUtils.validateSchemaLiteral(schema),
options = JsonExprUtils.convertToMapData(options),
child = child,
timeZoneId = None,
forceNullableSchema = SQLConf.get.getConf(SQLConf.FROM_JSON_FORCE_NULLABLE_SCHEMA))
// Used in `org.apache.spark.sql.functions`
def this(schema: DataType, options: Map[String, String], child: Expression) =
this(schema, options, child, timeZoneId = None,
forceNullableSchema = SQLConf.get.getConf(SQLConf.FROM_JSON_FORCE_NULLABLE_SCHEMA))
override def checkInputDataTypes(): TypeCheckResult = nullableSchema match {
case _: StructType | ArrayType(_: StructType, _) | _: MapType =>
super.checkInputDataTypes()
case _ => TypeCheckResult.TypeCheckFailure(
s"Input schema ${nullableSchema.simpleString} must be a struct or an array of structs.")
}
@transient
lazy val rowSchema = nullableSchema match {
case st: StructType => st
case ArrayType(st: StructType, _) => st
case mt: MapType => mt
}
// This converts parsed rows to the desired output by the given schema.
@transient
lazy val converter = nullableSchema match {
case _: StructType =>
(rows: Seq[InternalRow]) => if (rows.length == 1) rows.head else null
case ArrayType(_: StructType, _) =>
(rows: Seq[InternalRow]) => new GenericArrayData(rows)
case _: MapType =>
(rows: Seq[InternalRow]) => rows.head.getMap(0)
}
@transient
lazy val parser =
new JacksonParser(
rowSchema,
new JSONOptions(options + ("mode" -> FailFastMode.name), timeZoneId.get))
override def dataType: DataType = nullableSchema
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override def nullSafeEval(json: Any): Any = {
// When input is,
// - `null`: `null`.
// - invalid json: `null`.
// - empty string: `null`.
//
// When the schema is array,
// - json array: `Array(Row(...), ...)`
// - json object: `Array(Row(...))`
// - empty json array: `Array()`.
// - empty json object: `Array(Row(null))`.
//
// When the schema is a struct,
// - json object/array with single element: `Row(...)`
// - json array with multiple elements: `null`
// - empty json array: `null`.
// - empty json object: `Row(null)`.
// We need `null` if the input string is an empty string. `JacksonParser` can
// deal with this but produces `Nil`.
if (json.toString.trim.isEmpty) return null
try {
converter(parser.parse(
json.asInstanceOf[UTF8String],
CreateJacksonParser.utf8String,
identity[UTF8String]))
} catch {
case _: BadRecordException => null
}
}
override def inputTypes: Seq[AbstractDataType] = StringType :: Nil
override def sql: String = schema match {
case _: MapType => "entries"
case _ => super.sql
}
}
/**
* Converts a [[StructType]], [[ArrayType]] of [[StructType]]s, [[MapType]]
* or [[ArrayType]] of [[MapType]]s to a json output string.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr[, options]) - Returns a json string with a given struct value",
examples = """
Examples:
> SELECT _FUNC_(named_struct('a', 1, 'b', 2));
{"a":1,"b":2}
> SELECT _FUNC_(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'));
{"time":"26/08/2015"}
      > SELECT _FUNC_(array(named_struct('a', 1, 'b', 2)));
[{"a":1,"b":2}]
> SELECT _FUNC_(map('a', named_struct('b', 1)));
{"a":{"b":1}}
> SELECT _FUNC_(map(named_struct('a', 1),named_struct('b', 2)));
{"[1]":{"b":2}}
> SELECT _FUNC_(map('a', 1));
{"a":1}
> SELECT _FUNC_(array((map('a', 1))));
[{"a":1}]
""",
since = "2.2.0")
// scalastyle:on line.size.limit
case class StructsToJson(
options: Map[String, String],
child: Expression,
timeZoneId: Option[String] = None)
extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {
override def nullable: Boolean = true
def this(options: Map[String, String], child: Expression) = this(options, child, None)
// Used in `FunctionRegistry`
def this(child: Expression) = this(Map.empty, child, None)
def this(child: Expression, options: Expression) =
this(
options = JsonExprUtils.convertToMapData(options),
child = child,
timeZoneId = None)
@transient
lazy val writer = new CharArrayWriter()
@transient
lazy val gen = new JacksonGenerator(
rowSchema, writer, new JSONOptions(options, timeZoneId.get))
@transient
lazy val rowSchema = child.dataType match {
case st: StructType => st
case ArrayType(st: StructType, _) => st
case mt: MapType => mt
case ArrayType(mt: MapType, _) => mt
}
// This converts rows to the JSON output according to the given schema.
@transient
lazy val converter: Any => UTF8String = {
def getAndReset(): UTF8String = {
gen.flush()
val json = writer.toString
writer.reset()
UTF8String.fromString(json)
}
child.dataType match {
case _: StructType =>
(row: Any) =>
gen.write(row.asInstanceOf[InternalRow])
getAndReset()
case ArrayType(_: StructType, _) =>
(arr: Any) =>
gen.write(arr.asInstanceOf[ArrayData])
getAndReset()
case _: MapType =>
(map: Any) =>
gen.write(map.asInstanceOf[MapData])
getAndReset()
case ArrayType(_: MapType, _) =>
(arr: Any) =>
gen.write(arr.asInstanceOf[ArrayData])
getAndReset()
}
}
override def dataType: DataType = StringType
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case _: StructType | ArrayType(_: StructType, _) =>
try {
JacksonUtils.verifySchema(rowSchema.asInstanceOf[StructType])
TypeCheckResult.TypeCheckSuccess
} catch {
case e: UnsupportedOperationException =>
TypeCheckResult.TypeCheckFailure(e.getMessage)
}
case _: MapType | ArrayType(_: MapType, _) =>
// TODO: let `JacksonUtils.verifySchema` verify a `MapType`
try {
val st = StructType(StructField("a", rowSchema.asInstanceOf[MapType]) :: Nil)
JacksonUtils.verifySchema(st)
TypeCheckResult.TypeCheckSuccess
} catch {
case e: UnsupportedOperationException =>
TypeCheckResult.TypeCheckFailure(e.getMessage)
}
case _ => TypeCheckResult.TypeCheckFailure(
s"Input type ${child.dataType.simpleString} must be a struct, array of structs or " +
"a map or array of map.")
}
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override def nullSafeEval(value: Any): Any = converter(value)
override def inputTypes: Seq[AbstractDataType] = TypeCollection(ArrayType, StructType) :: Nil
}
object JsonExprUtils {
def validateSchemaLiteral(exp: Expression): DataType = exp match {
case Literal(s, StringType) => DataType.fromDDL(s.toString)
case e => throw new AnalysisException(s"Expected a string literal instead of $e")
}
def convertToMapData(exp: Expression): Map[String, String] = exp match {
case m: CreateMap
if m.dataType.acceptsType(MapType(StringType, StringType, valueContainsNull = false)) =>
val arrayMap = m.eval().asInstanceOf[ArrayBasedMapData]
ArrayBasedMapData.toScalaMap(arrayMap).map { case (key, value) =>
key.toString -> value.toString
}
case m: CreateMap =>
throw new AnalysisException(
s"A type of keys and values in map() must be string, but got ${m.dataType}")
case _ =>
throw new AnalysisException("Must use a map() function for options")
}
}
| bravo-zhang/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala | Scala | apache-2.0 | 27,200 |
package uk.ac.ncl.openlab.intake24.services.foodindex.danish
import org.apache.commons.lang3.StringUtils
import org.workcraft.phrasesearch.{CaseInsensitiveString, WordOps}
class DanishWordOpsImpl extends WordOps {
val stemmer = new DanishSnowballStemmer()
val danishWords: Set[String] = scala.io.Source.fromInputStream(getClass.getResourceAsStream("da_dict.txt")).getLines().toSet
def stem(word: CaseInsensitiveString): CaseInsensitiveString = {
stemmer.setCurrent(word.lowerCase)
stemmer.stem()
val stemmed = stemmer.getCurrent
CaseInsensitiveString(StringUtils.stripAccents(stemmed))
}
def splitCompound(word: CaseInsensitiveString): Seq[CaseInsensitiveString] = {
def decompose(word: String, acc: List[String]): List[String] = {
val candidates = Range(1, word.length).map(word.splitAt(_)).filter(c => danishWords.contains(c._2))
      // Performance note: ideally this would be lazy to avoid building the whole search space at
      // once, but the space is typically very small, so in practice it does not matter.
candidates.map {
case (l, r) => decompose(l, r :: acc)
}.find(_.nonEmpty).getOrElse {
if (danishWords.contains(word))
word :: acc
else
Nil
}
}
val decomposed = decompose(word.lowerCase, Nil)
if (decomposed.isEmpty) // always return original word even if it is not a dictionary word
List(word)
else
decomposed.map(CaseInsensitiveString(_))
}
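  // Hedged example: assuming both halves appear in da_dict.txt, a compound such as "rødvin"
  // would be split into Seq("rød", "vin"); a word that cannot be decomposed is returned
  // unchanged as a single-element Seq.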
}
| digitalinteraction/intake24 | FoodDataServices/src/main/scala/uk/ac/ncl/openlab/intake24/services/foodindex/danish/DanishWordOpsImpl.scala | Scala | apache-2.0 | 1,470
/*
* MOIS: NetCdf Step Handler Test
* Copyright (C) 2014 University of Edinburgh School of Informatics
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.ac.ed.inf.mois.test
import uk.ac.ed.inf.mois.{DiscreteProcess, NetCdfWriter}
import spire.implicits._
import uk.ac.ed.inf.mois.implicits._
import java.io.File
import scala.sys.process._
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
class NetCdfTest extends FlatSpec with Matchers with BeforeAndAfter {
after {
new File("test.nc").delete
}
class P extends DiscreteProcess[Double] {
annotate ("rdfs:label", "NetCDF Test Process")
val x1 = Double("ex:x1")
val x2 = Double("ex:x2")
val x3 = Double("ex:x3")
val x4 = Double("ex:D") dimension(3)
next(x1) := x1 + 1 + x4
next(x2) := x1 + x2 + 1 + x4
next(x3) := x1 + x2 + x3 + 1 + x4
}
val expected =
"""netcdf test {
dimensions:
sim\\:t = UNLIMITED ; // (4 currently)
ex\\:D = 3 ;
variables:
double sim\\:t(sim\\:t) ;
double ex\\:D(ex\\:D) ;
double ex\\:x1(sim\\:t, ex\\:D) ;
double ex\\:x2(sim\\:t, ex\\:D) ;
double ex\\:x3(sim\\:t, ex\\:D) ;
// global attributes:
:rdfs\\:label = "NetCDF Test Process" ;
:class = "uk.ac.ed.inf.mois.test.NetCdfTest$P" ;
data:
sim\\:t = 1, 2, 3, 4 ;
ex\\:D = 0, 10, 20 ;
ex\\:x1 =
1, 11, 21,
2, 22, 42,
3, 33, 63,
4, 44, 84 ;
ex\\:x2 =
1, 11, 21,
3, 33, 63,
6, 66, 126,
10, 110, 210 ;
ex\\:x3 =
1, 11, 21,
4, 44, 84,
10, 110, 210,
20, 220, 420 ;
}
"""
"netcdf writer" should "store data scalably" in {
val p = new P
val cdf = new NetCdfWriter("test.nc")
p.addStepHandler(cdf)
p.init(0)
import p._
for (d <- 0 until 3) {
x1 := 0
x2 := 0
x3 := 0
reset(0)
for (t <- 0 until 4) {
p(t, 1)
}
p.dimension(x4) += 1
x4 += 10
}
cdf.finish
var got: String = null
val pb = List("sh", "-c", "ncdump test.nc | sed -e /created/d -e 's/\\t/ /g'")
val pio = new ProcessIO(_ => (),
stdout => {
got = scala.io.Source.fromInputStream(stdout).mkString
},
_ => ())
val ncdump = pb.run(pio)
ncdump.exitValue should equal (0)
got should equal (expected)
}
}
| edinburgh-rbm/mois | src/test/scala/uk/ac/ed/inf/mois/test/NetCDFTest.scala | Scala | gpl-3.0 | 2,861 |
package io.getquill.norm
import io.getquill.ast._
import io.getquill.ast.Implicits._
import io.getquill.norm.EqualityBehavior.AnsiEquality
/**
* Due to the introduction of null checks in `map`, `flatMap`, and `exists`, in
* `FlattenOptionOperation` in order to resolve #1053, as well as to support non-ansi
* compliant string concatenation as outlined in #1295, large conditional composites
* became common. For example:
* <code><pre>
* case class Holder(value:Option[String])
*
* // The following statement
* query[Holder].map(h => h.value.map(_ + "foo"))
* // Will yield the following result
* SELECT CASE WHEN h.value IS NOT NULL THEN h.value || 'foo' ELSE null END FROM Holder h
* </pre></code>
* Now, let's add a <code>getOrElse</code> statement to the clause that requires an additional
* wrapped null check. We cannot rely on there being a <code>map</code> call beforehand
 * (since we could be reading <code>value</code> as a nullable field directly from the database).
* <code><pre>
* // The following statement
* query[Holder].map(h => h.value.map(_ + "foo").getOrElse("bar"))
* // Yields the following result:
* SELECT CASE WHEN
* CASE WHEN h.value IS NOT NULL THEN h.value || 'foo' ELSE null END
* IS NOT NULL THEN
* CASE WHEN h.value IS NOT NULL THEN h.value || 'foo' ELSE null END
* ELSE 'bar' END FROM Holder h
* </pre></code>
* This of course is highly redundant and can be reduced to simply:
* <code><pre>
* SELECT CASE WHEN h.value IS NOT NULL AND (h.value || 'foo') IS NOT NULL THEN h.value || 'foo' ELSE 'bar' END FROM Holder h
* </pre></code>
* This reduction is done by the "Center Rule." There are some other simplification
 * rules as well. Note how we are forced to null-check both `h.value` and `(h.value || 'foo')` because
* a user may use `Option[T].flatMap` and explicitly transform a particular value to `null`.
*/
class SimplifyNullChecks(equalityBehavior: EqualityBehavior) extends StatelessTransformer {
override def apply(ast: Ast): Ast =
ast match {
// Center rule
case IfExist(
IfExistElseNull(condA, thenA),
IfExistElseNull(condB, thenB),
otherwise
) if (condA == condB && thenA == thenB) =>
apply(If(IsNotNullCheck(condA) +&&+ IsNotNullCheck(thenA), thenA, otherwise))
// Left hand rule
case IfExist(IfExistElseNull(check, affirm), value, otherwise) =>
apply(If(IsNotNullCheck(check) +&&+ IsNotNullCheck(affirm), value, otherwise))
// Right hand rule
case IfExistElseNull(cond, IfExistElseNull(innerCond, innerThen)) =>
apply(If(IsNotNullCheck(cond) +&&+ IsNotNullCheck(innerCond), innerThen, NullValue))
case OptionIsDefined(Optional(a)) +&&+ OptionIsDefined(Optional(b)) +&&+ (exp @ (Optional(a1) `== or !=` Optional(b1))) if (a == a1 && b == b1 && equalityBehavior == AnsiEquality) => apply(exp)
case OptionIsDefined(Optional(a)) +&&+ (exp @ (Optional(a1) `== or !=` Optional(_))) if (a == a1 && equalityBehavior == AnsiEquality) => apply(exp)
case OptionIsDefined(Optional(b)) +&&+ (exp @ (Optional(_) `== or !=` Optional(b1))) if (b == b1 && equalityBehavior == AnsiEquality) => apply(exp)
case (left +&&+ OptionIsEmpty(Optional(Constant(_, _)))) +||+ other => apply(other)
case (OptionIsEmpty(Optional(Constant(_, _))) +&&+ right) +||+ other => apply(other)
case other +||+ (left +&&+ OptionIsEmpty(Optional(Constant(_, _)))) => apply(other)
case other +||+ (OptionIsEmpty(Optional(Constant(_, _))) +&&+ right) => apply(other)
case (left +&&+ OptionIsDefined(Optional(Constant(_, _)))) => apply(left)
case (OptionIsDefined(Optional(Constant(_, _))) +&&+ right) => apply(right)
case (left +||+ OptionIsEmpty(Optional(Constant(_, _)))) => apply(left)
case (OptionIsEmpty(OptionSome(Optional(_))) +||+ right) => apply(right)
case other =>
super.apply(other)
}
object `== or !=` {
def unapply(ast: Ast): Option[(Ast, Ast)] = ast match {
case a +==+ b => Some((a, b))
case a +!=+ b => Some((a, b))
case _ => None
}
}
/**
* Simple extractor that looks inside of an optional values to see if the thing inside can be pulled out.
* If not, it just returns whatever element it can find.
*/
object Optional {
def unapply(a: Ast): Option[Ast] = a match {
case OptionApply(value) => Some(value)
case OptionSome(value) => Some(value)
case value => Some(value)
}
}
}
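// Minimal usage sketch: the transformer is applied to an Ast like any other StatelessTransformer,
// e.g. `val simplified = new SimplifyNullChecks(AnsiEquality).apply(ast)`, where `ast` is an
// io.getquill.ast.Ast and AnsiEquality is the equality behavior imported above.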
| getquill/quill | quill-core-portable/src/main/scala/io/getquill/norm/SimplifyNullChecks.scala | Scala | apache-2.0 | 4,519 |
package dk.bayes.math.discretise
import org.junit._
import Assert._
import dk.bayes.clustergraph.testutil.AssertUtil._
class HistogramTest {
@Test def toValues = {
assertVector(List(-10, 0, 10), Histogram(-10, 10, 3).toValues, 0.001)
assertVector(List(-10, -3, 4, 11), Histogram(-10, 11, 4).toValues, 0.001)
assertVector(List(-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10), Histogram(-10, 10, 11).toValues, 0.001)
}
@Test def mapValues = {
    assertVector(List(-25, -7.5, 10, 27.5), Histogram(-10, 11, 4).mapValues(v => v * 2.5), 0.001)
}
@Test(expected = classOf[IllegalArgumentException]) def valueOf_below_range: Unit = {
Histogram(-10, 10, 4).valueOf(-1)
}
@Test(expected = classOf[IllegalArgumentException]) def valueOf_above_range: Unit = {
Histogram(-10, 10, 4).valueOf(5)
}
@Test def valueOf = {
assertEquals(-10, Histogram(-10, 10, 4).valueOf(0), 0)
assertEquals(-3.333, Histogram(-10, 10, 4).valueOf(1), 0.001)
assertEquals(3.333, Histogram(-10, 10, 4).valueOf(2), 0.001)
assertEquals(10, Histogram(-10, 10, 4).valueOf(3), 0)
}
@Test(expected = classOf[IllegalArgumentException]) def binIndexOf_below_range:Unit = {
Histogram(-10, 10, 4).binIndexOf(-10.1)
}
@Test(expected = classOf[IllegalArgumentException]) def binIndexOf_above_range:Unit = {
Histogram(-10, 10, 4).binIndexOf(10.1)
}
@Test def binIndexOf = {
assertEquals(0, Histogram(-10, 10, 4).binIndexOf(-10))
assertEquals(1, Histogram(-10, 10, 4).binIndexOf(-9.999))
assertEquals(1, Histogram(-10, 10, 4).binIndexOf(-8.64))
assertEquals(1, Histogram(-10, 10, 4).binIndexOf(-3.334))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(-3.333))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(-3.332))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(-0.1))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(0))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(1))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(3.332))
assertEquals(2, Histogram(-10, 10, 4).binIndexOf(3.333))
assertEquals(3, Histogram(-10, 10, 4).binIndexOf(3.334))
assertEquals(3, Histogram(-10, 10, 4).binIndexOf(7.54))
assertEquals(3, Histogram(-10, 10, 4).binIndexOf(9.99))
assertEquals(3, Histogram(-10, 10, 4).binIndexOf(10))
}
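  // Hypothetical extra example (not part of the original suite): bin values are evenly spaced
  // between the bounds, so a 0..1 histogram with three bins discretises to 0, 0.5 and 1.
  @Test def toValues_unit_interval = {
    assertVector(List(0d, 0.5, 1d), Histogram(0, 1, 3).toValues, 0.001)
  }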
}
| danielkorzekwa/bayes-scala | src/test/scala/dk/bayes/math/discretise/HistogramTest.scala | Scala | bsd-2-clause | 2,308
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.dstream
import org.apache.spark.streaming.{Duration, Time}
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.UnionRDD
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
private[streaming]
class UnionDStream[T: ClassTag](parents: Array[DStream[T]])
extends DStream[T](parents.head.ssc) {
if (parents.length == 0) {
throw new IllegalArgumentException("Empty array of parents")
}
if (parents.map(_.ssc).distinct.size > 1) {
throw new IllegalArgumentException("Array of parents have different StreamingContexts")
}
if (parents.map(_.slideDuration).distinct.size > 1) {
throw new IllegalArgumentException("Array of parents have different slide times")
}
override def dependencies = parents.toList
override def slideDuration: Duration = parents.head.slideDuration
override def compute(validTime: Time): Option[RDD[T]] = {
val rdds = new ArrayBuffer[RDD[T]]()
parents.map(_.getOrCompute(validTime)).foreach(_ match {
case Some(rdd) => rdds += rdd
case None => throw new Exception("Could not generate RDD from a parent for unifying at time " + validTime)
})
if (rdds.size > 0) {
Some(new UnionRDD(ssc.sc, rdds))
} else {
None
}
}
}
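// Usage note: this class is normally not constructed directly; StreamingContext.union builds it,
// roughly as `new UnionDStream(streams.toArray)`, and compute() then unions the parents' RDDs
// for each batch interval.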
| cloudera/spark | streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala | Scala | apache-2.0 | 2,085 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import java.util.Locale
import scala.reflect.runtime.universe.typeTag
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
/**
* The data type representing `java.math.BigDecimal` values.
 * A Decimal that must have fixed precision (the maximum number of digits) and scale (the number
 * of digits to the right of the decimal point).
*
* The precision can be up to 38, scale can also be up to 38 (less or equal to precision).
*
* The default precision and scale is (10, 0).
*
* Please use `DataTypes.createDecimalType()` to create a specific instance.
*
* @since 1.3.0
*/
@InterfaceStability.Stable
case class DecimalType(precision: Int, scale: Int) extends FractionalType {
if (scale > precision) {
throw new AnalysisException(
s"Decimal scale ($scale) cannot be greater than precision ($precision).")
}
if (precision > DecimalType.MAX_PRECISION) {
throw new AnalysisException(s"DecimalType can only support precision up to 38")
}
// default constructor for Java
def this(precision: Int) = this(precision, 0)
def this() = this(10)
private[sql] type InternalType = Decimal
@transient private[sql] lazy val tag = typeTag[InternalType]
private[sql] val numeric = Decimal.DecimalIsFractional
private[sql] val fractional = Decimal.DecimalIsFractional
private[sql] val ordering = Decimal.DecimalIsFractional
private[sql] val asIntegral = Decimal.DecimalAsIfIntegral
override def typeName: String = s"decimal($precision,$scale)"
override def toString: String = s"DecimalType($precision,$scale)"
override def sql: String = typeName.toUpperCase(Locale.ROOT)
/**
* Returns whether this DecimalType is wider than `other`. If yes, it means `other`
* can be casted into `this` safely without losing any precision or range.
*/
private[sql] def isWiderThan(other: DataType): Boolean = other match {
case dt: DecimalType =>
(precision - scale) >= (dt.precision - dt.scale) && scale >= dt.scale
case dt: IntegralType =>
isWiderThan(DecimalType.forType(dt))
case _ => false
}
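  // Worked example: DecimalType(10, 2) is wider than DecimalType(5, 1) because it keeps
  // 10 - 2 = 8 >= 5 - 1 = 4 integer digits and 2 >= 1 fractional digits, so a (5, 1) value can
  // be cast to (10, 2) without losing precision or range.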
/**
* Returns whether this DecimalType is tighter than `other`. If yes, it means `this`
* can be casted into `other` safely without losing any precision or range.
*/
private[sql] def isTighterThan(other: DataType): Boolean = other match {
case dt: DecimalType =>
(precision - scale) <= (dt.precision - dt.scale) && scale <= dt.scale
case dt: IntegralType =>
isTighterThan(DecimalType.forType(dt))
case _ => false
}
/**
* The default size of a value of the DecimalType is 8 bytes when precision is at most 18,
* and 16 bytes otherwise.
*/
override def defaultSize: Int = if (precision <= Decimal.MAX_LONG_DIGITS) 8 else 16
override def simpleString: String = s"decimal($precision,$scale)"
private[spark] override def asNullable: DecimalType = this
}
/**
* Extra factory methods and pattern matchers for Decimals.
*
* @since 1.3.0
*/
@InterfaceStability.Stable
object DecimalType extends AbstractDataType {
import scala.math.min
val MAX_PRECISION = 38
val MAX_SCALE = 38
val SYSTEM_DEFAULT: DecimalType = DecimalType(MAX_PRECISION, 18)
val USER_DEFAULT: DecimalType = DecimalType(10, 0)
val MINIMUM_ADJUSTED_SCALE = 6
// The decimal types compatible with other numeric types
private[sql] val ByteDecimal = DecimalType(3, 0)
private[sql] val ShortDecimal = DecimalType(5, 0)
private[sql] val IntDecimal = DecimalType(10, 0)
private[sql] val LongDecimal = DecimalType(20, 0)
private[sql] val FloatDecimal = DecimalType(14, 7)
private[sql] val DoubleDecimal = DecimalType(30, 15)
private[sql] val BigIntDecimal = DecimalType(38, 0)
private[sql] def forType(dataType: DataType): DecimalType = dataType match {
case ByteType => ByteDecimal
case ShortType => ShortDecimal
case IntegerType => IntDecimal
case LongType => LongDecimal
case FloatType => FloatDecimal
case DoubleType => DoubleDecimal
}
private[sql] def fromLiteral(literal: Literal): DecimalType = literal.value match {
case v: Short => fromBigDecimal(BigDecimal(v))
case v: Int => fromBigDecimal(BigDecimal(v))
case v: Long => fromBigDecimal(BigDecimal(v))
case _ => forType(literal.dataType)
}
private[sql] def fromBigDecimal(d: BigDecimal): DecimalType = {
DecimalType(Math.max(d.precision, d.scale), d.scale)
}
private[sql] def bounded(precision: Int, scale: Int): DecimalType = {
DecimalType(min(precision, MAX_PRECISION), min(scale, MAX_SCALE))
}
/**
 * The scale adjustment implementation is based on Hive's, which is itself inspired by
 * SQL Server's. In particular, when a result precision is greater than
* {@link #MAX_PRECISION}, the corresponding scale is reduced to prevent the integral part of a
* result from being truncated.
*
* This method is used only when `spark.sql.decimalOperations.allowPrecisionLoss` is set to true.
*/
private[sql] def adjustPrecisionScale(precision: Int, scale: Int): DecimalType = {
// Assumption:
assert(precision >= scale)
if (precision <= MAX_PRECISION) {
// Adjustment only needed when we exceed max precision
DecimalType(precision, scale)
} else if (scale < 0) {
// Decimal can have negative scale (SPARK-24468). In this case, we cannot allow a precision
// loss since we would cause a loss of digits in the integer part.
// In this case, we are likely to meet an overflow.
DecimalType(MAX_PRECISION, scale)
} else {
// Precision/scale exceed maximum precision. Result must be adjusted to MAX_PRECISION.
val intDigits = precision - scale
// If original scale is less than MINIMUM_ADJUSTED_SCALE, use original scale value; otherwise
// preserve at least MINIMUM_ADJUSTED_SCALE fractional digits
val minScaleValue = Math.min(scale, MINIMUM_ADJUSTED_SCALE)
// The resulting scale is the maximum between what is available without causing a loss of
// digits for the integer part of the decimal and the minimum guaranteed scale, which is
// computed above
val adjustedScale = Math.max(MAX_PRECISION - intDigits, minScaleValue)
DecimalType(MAX_PRECISION, adjustedScale)
}
}
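  // Worked example: multiplying two DECIMAL(38, 10) operands nominally needs precision 77 and
  // scale 20. Here intDigits = 77 - 20 = 57, so MAX_PRECISION - intDigits is negative and the
  // scale is clamped to MINIMUM_ADJUSTED_SCALE, yielding DecimalType(38, 6).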
override private[sql] def defaultConcreteType: DataType = SYSTEM_DEFAULT
override private[sql] def acceptsType(other: DataType): Boolean = {
other.isInstanceOf[DecimalType]
}
override private[sql] def simpleString: String = "decimal"
private[sql] object Fixed {
def unapply(t: DecimalType): Option[(Int, Int)] = Some((t.precision, t.scale))
}
private[sql] object Expression {
def unapply(e: Expression): Option[(Int, Int)] = e.dataType match {
case t: DecimalType => Some((t.precision, t.scale))
case _ => None
}
}
/**
* Returns if dt is a DecimalType that fits inside an int
*/
def is32BitDecimalType(dt: DataType): Boolean = {
dt match {
case t: DecimalType =>
t.precision <= Decimal.MAX_INT_DIGITS
case _ => false
}
}
/**
   * Returns whether dt is a DecimalType that fits inside a long
*/
def is64BitDecimalType(dt: DataType): Boolean = {
dt match {
case t: DecimalType =>
t.precision <= Decimal.MAX_LONG_DIGITS
case _ => false
}
}
/**
   * Returns whether dt is a DecimalType that doesn't fit inside a long
*/
def isByteArrayDecimalType(dt: DataType): Boolean = {
dt match {
case t: DecimalType =>
t.precision > Decimal.MAX_LONG_DIGITS
case _ => false
}
}
def unapply(t: DataType): Boolean = t.isInstanceOf[DecimalType]
def unapply(e: Expression): Boolean = e.dataType.isInstanceOf[DecimalType]
}
| bravo-zhang/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala | Scala | apache-2.0 | 8,658 |
package com.datastax.spark.connector.types
import com.datastax.oss.driver.api.core.CqlSession
import com.datastax.spark.connector.SparkCassandraITFlatSpecBase
import com.datastax.spark.connector.cluster.DefaultCluster
import com.datastax.spark.connector.cql.CassandraConnector
import com.datastax.spark.connector._
import org.apache.spark.sql.cassandra._
// UDTs
case class File(data: Array[Byte])
case class Profile(name: String, picture: File)
// Tables
case class Files(id: Int, file: File)
case class Profiles(id: Int, profile: Profile)
class UserDefinedTypeSpec extends SparkCassandraITFlatSpecBase with DefaultCluster {
override lazy val conn = CassandraConnector(sparkConf)
val FilesTable = "files"
val ProfilesTable = "profiles"
def makeUdtTables(session: CqlSession): Unit = {
session.execute(s"""CREATE TYPE IF NOT EXISTS $ks.file (data blob);""")
session.execute(
s"""CREATE TABLE IF NOT EXISTS $ks.$FilesTable
|(id int PRIMARY KEY, file frozen<file>);""".stripMargin)
session.execute(s"""CREATE TYPE IF NOT EXISTS $ks.profile (name text, picture frozen<file>)""")
session.execute(
s"""CREATE TABLE IF NOT EXISTS $ks.$ProfilesTable
|(id int PRIMARY KEY, profile frozen<profile>)""".stripMargin)
}
override def beforeClass {
conn.withSessionDo { session =>
session.execute(
s"""CREATE KEYSPACE IF NOT EXISTS $ks
|WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }"""
.stripMargin)
makeUdtTables(session)
}
}
"SparkSql" should "write UDTs with BLOB fields" in {
val expected = File(":)".getBytes)
spark.createDataFrame(Seq(Files(1, expected)))
.write
.cassandraFormat(FilesTable, ks)
.mode("append")
.save()
val row = spark.sparkContext
.cassandraTable[Files](ks, FilesTable)
.collect()
.head
row.file.data shouldEqual expected.data
}
it should "write nested UDTs" in {
val expected = Profile("John Smith", File(":)".getBytes))
spark.createDataFrame(Seq(Profiles(1, expected)))
.write
.cassandraFormat(ProfilesTable, ks)
.mode("append")
.save()
val row = spark.sparkContext
.cassandraTable[Profiles](ks, ProfilesTable)
.collect()
.head
row.profile.name shouldEqual expected.name
row.profile.picture.data shouldEqual expected.picture.data
}
} | datastax/spark-cassandra-connector | connector/src/it/scala/com/datastax/spark/connector/types/UserDefinedTypeSpec.scala | Scala | apache-2.0 | 2,424 |
package org.jetbrains.plugins.scala
import java.io.Closeable
import java.lang.reflect.InvocationTargetException
import java.util.concurrent.{Callable, Future}
import javax.swing.SwingUtilities
import com.intellij.openapi.application.{ApplicationManager, Result}
import com.intellij.openapi.command.{CommandProcessor, WriteCommandAction}
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.{Computable, ThrowableComputable}
import com.intellij.psi._
import com.intellij.psi.impl.source.PostprocessReformattingAspect
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.util.Processor
import org.jetbrains.annotations.NotNull
import org.jetbrains.plugins.scala.extensions.implementation._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScFieldId, ScPrimaryConstructor}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScNewTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter}
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScDeclaredElementsHolder, ScFunction}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScModifierListOwner, ScNamedElement, ScTypedDefinition}
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiParameter
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticClass
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.MixinNodes
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.TypeDefinitionMembers.SignatureNodes
import org.jetbrains.plugins.scala.lang.psi.light.{PsiClassWrapper, PsiTypedDefinitionWrapper, StaticPsiMethodWrapper}
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.psi.types.{ScSubstitutor, ScType, ScTypeExt}
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
import org.jetbrains.plugins.scala.project.ProjectExt
import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.HashSet
import scala.io.Source
import scala.language.higherKinds
import scala.reflect.{ClassTag, classTag}
import scala.runtime.NonLocalReturnControl
import scala.util.control.Exception.catching
import scala.util.matching.Regex
import scala.util.{Failure, Success, Try}
/**
* Pavel Fatin
*/
package object extensions {
implicit class PsiMethodExt(val repr: PsiMethod) extends AnyVal {
import org.jetbrains.plugins.scala.extensions.PsiMethodExt._
def isAccessor: Boolean = {
hasNoParams && hasQueryLikeName && !hasVoidReturnType
}
def isMutator: Boolean = {
hasVoidReturnType || hasMutatorLikeName
}
def hasQueryLikeName = {
def startsWith(name: String, prefix: String) =
name.length > prefix.length && name.startsWith(prefix) && name.charAt(prefix.length).isUpper
repr.getName match {
case "getInstance" => false // TODO others?
case name if startsWith(name, "getAnd") || startsWith(name, "getOr") => false
case AccessorNamePattern() => true
case _ => false
}
}
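    // Illustrative name classifications (not in the original source), per the rules above:
    //   "getName"         -> true  (matches AccessorNamePattern)
    //   "getInstance"     -> false (explicitly excluded)
    //   "getAndIncrement" -> false (starts with "getAnd")
    //   "getter"          -> false (no upper-case letter right after the prefix)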
def hasMutatorLikeName = repr.getName match {
case MutatorNamePattern() => true
case _ => false
}
def hasVoidReturnType = repr.getReturnType == PsiType.VOID
def hasNoParams = repr.getParameterList.getParameters.isEmpty
}
object PsiMethodExt {
val AccessorNamePattern =
"""(?-i)(?:get|is|can|could|has|have|to)\\p{Lu}.*""".r
val MutatorNamePattern =
"""(?-i)(?:do|set|add|remove|insert|delete|aquire|release|update)(?:\\p{Lu}.*)""".r
}
implicit class TraversableExt[CC[X] <: Traversable[X], A](val value: CC[A]) extends AnyVal {
private type CanBuildTo[Elem, C[X]] = CanBuildFrom[Nothing, Elem, C[Elem]]
def filterBy[T](aClass: Class[T])(implicit cbf: CanBuildTo[T, CC]): CC[T] =
value.filter(aClass.isInstance(_)).map[T, CC[T]](_.asInstanceOf[T])(collection.breakOut)
def findBy[T](aClass: Class[T]): Option[T] =
value.find(aClass.isInstance(_)).map(_.asInstanceOf[T])
def mkParenString(implicit ev: A <:< String): String = value.mkString("(", ", ", ")")
}
implicit class SeqExt[CC[X] <: Seq[X], A](val value: CC[A]) extends AnyVal {
private type CanBuildTo[Elem, C[X]] = CanBuildFrom[Nothing, Elem, C[Elem]]
def distinctBy[K](f: A => K)(implicit cbf: CanBuildTo[A, CC]): CC[A] = {
val b = cbf()
var seen = Set[K]()
for (x <- value) {
val v = f(x)
if (!(seen contains v)) {
b += x
seen = seen + v
}
}
b.result()
}
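    // Hypothetical usage (not in the original source): keeps the first element seen for each key.
    //   Seq("a", "b", "ab").distinctBy(_.length) // Seq("a", "ab")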
def mapWithIndex[B](f: (A, Int) => B)(implicit cbf: CanBuildTo[B, CC]): CC[B] = {
val b = cbf()
var i = 0
for (x <- value) {
b += f(x, i)
i += 1
}
b.result()
}
def foreachWithIndex[B](f: (A, Int) => B) {
var i = 0
for (x <- value) {
f(x, i)
i += 1
}
}
}
implicit class IterableExt[CC[X] <: Iterable[X], A](val value: CC[A]) extends AnyVal {
private type CanBuildTo[Elem, C[X]] = CanBuildFrom[Nothing, Elem, C[Elem]]
def zipMapped[B](f: A => B)(implicit cbf: CanBuildTo[(A, B), CC]): CC[(A, B)] = {
val b = cbf()
val it = value.iterator
while (it.hasNext) {
val v = it.next()
b += ((v, f(v)))
}
b.result()
}
}
implicit class ObjectExt[T](val v: T) extends AnyVal{
def toOption: Option[T] = Option(v)
def asOptionOf[E: ClassTag]: Option[E] = {
if (classTag[E].runtimeClass.isInstance(v)) Some(v.asInstanceOf[E])
else None
}
def getOrElse[H >: T](default: H): H = if (v == null) default else v
def collectOption[B](pf : scala.PartialFunction[T, B]) = Some(v).collect(pf)
}
implicit class BooleanExt(val b: Boolean) extends AnyVal {
def ifTrue[T](value: => T) = if (b) Some(value) else None
    // looks better within expressions than a { if (???) ??? else ??? } block
def fold[T](ifTrue: => T, ifFalse: => T) = if (b) ifTrue else ifFalse
def toInt: Int = if (b) 1 else 0
}
implicit class StringExt(val s: String) extends AnyVal{
def startsWith(c: Char) = !s.isEmpty && s.charAt(0) == c
def endsWith(c: Char) = !s.isEmpty && s.charAt(s.length - 1) == c
def parenthesisedIf(condition: Boolean) = if (condition) "(" + s + ")" else s
}
implicit class PsiElementExt(override val repr: PsiElement) extends AnyVal with PsiElementExtTrait {
def startOffsetInParent: Int = {
repr match {
case s: ScalaPsiElement => s.startOffsetInParent
case _ => repr.getStartOffsetInParent
}
}
def typeSystem = repr.getProject.typeSystem
def ofNamedElement(substitutor: ScSubstitutor = ScSubstitutor.empty): Option[ScType] = {
def lift: PsiType => Option[ScType] = _.toScType(repr.getProject, repr.getResolveScope).toOption
(repr match {
case e: ScPrimaryConstructor => None
case e: ScFunction if e.isConstructor => None
case e: ScFunction => e.returnType.toOption
case e: ScBindingPattern => e.getType(TypingContext.empty).toOption
case e: ScFieldId => e.getType(TypingContext.empty).toOption
case e: ScParameter => e.getRealParameterType(TypingContext.empty).toOption
case e: PsiMethod if e.isConstructor => None
case e: PsiMethod => lift(e.getReturnType)
case e: PsiVariable => lift(e.getType)
case _ => None
}).map(substitutor.subst)
}
}
implicit class PsiTypeExt(val `type`: PsiType) extends AnyVal {
def toScType(project: Project,
scope: GlobalSearchScope = null,
visitedRawTypes: HashSet[PsiClass] = HashSet.empty,
paramTopLevel: Boolean = false,
treatJavaObjectAsAny: Boolean = true) = {
project.typeSystem.bridge.toScType(`type`, project, scope, visitedRawTypes, paramTopLevel, treatJavaObjectAsAny)
}
}
implicit class PsiMemberExt(val member: PsiMember) extends AnyVal {
/**
* Second match branch is for Java only.
*/
def containingClass: PsiClass = {
member match {
case member: ScMember => member.containingClass
case b: ScBindingPattern => b.containingClass
case _ => member.getContainingClass
}
}
}
implicit class PsiClassExt(val clazz: PsiClass) extends AnyVal {
/**
* Second match branch is for Java only.
*/
def qualifiedName: String = {
clazz match {
case t: ScTemplateDefinition => t.qualifiedName
case _ => clazz.getQualifiedName
}
}
def constructors: Array[PsiMethod] = {
clazz match {
case c: ScClass => c.constructors
case _ => clazz.getConstructors
}
}
def isEffectivelyFinal: Boolean = clazz match {
case scClass: ScClass => scClass.hasFinalModifier
case _: ScObject | _: ScNewTemplateDefinition => true
case synth: ScSyntheticClass if !Seq("AnyRef", "AnyVal").contains(synth.className) => true //wrappers for value types
case _ => clazz.hasModifierProperty(PsiModifier.FINAL)
}
def processPsiMethodsForNode(node: SignatureNodes.Node, isStatic: Boolean, isInterface: Boolean)
(processMethod: PsiMethod => Unit, processName: String => Unit = _ => ()): Unit = {
def concreteClassFor(typedDef: ScTypedDefinition): Option[PsiClass] = {
if (typedDef.isAbstractMember) return None
clazz match {
case wrapper: PsiClassWrapper if wrapper.definition.isInstanceOf[ScObject] =>
return Some(wrapper) //this is static case, when containing class should be wrapper
case _ =>
}
ScalaPsiUtil.nameContext(typedDef) match {
case m: ScMember =>
m.containingClass match {
case t: ScTrait =>
val linearization = MixinNodes.linearization(clazz)
.flatMap(_.extractClass(clazz.getProject)(clazz.typeSystem))
var index = linearization.indexWhere(_ == t)
while (index >= 0) {
val cl = linearization(index)
if (!cl.isInterface) return Some(cl)
index -= 1
}
Some(clazz)
case _ => None
}
case _ => None
}
}
node.info.namedElement match {
case fun: ScFunction if !fun.isConstructor =>
val wrappers = fun.getFunctionWrappers(isStatic, isInterface = fun.isAbstractMember, concreteClassFor(fun))
wrappers.foreach(processMethod)
wrappers.foreach(w => processName(w.name))
case method: PsiMethod if !method.isConstructor =>
if (isStatic) {
if (method.containingClass != null && method.containingClass.qualifiedName != "java.lang.Object") {
processMethod(StaticPsiMethodWrapper.getWrapper(method, clazz))
processName(method.getName)
}
}
else {
processMethod(method)
processName(method.getName)
}
case t: ScTypedDefinition if t.isVal || t.isVar ||
(t.isInstanceOf[ScClassParameter] && t.asInstanceOf[ScClassParameter].isCaseClassVal) =>
PsiTypedDefinitionWrapper.processWrappersFor(t, concreteClassFor(t), node.info.name, isStatic, isInterface, processMethod, processName)
case _ =>
}
}
def namedElements: Seq[PsiNamedElement] = {
clazz match {
case td: ScTemplateDefinition =>
td.members.flatMap {
case holder: ScDeclaredElementsHolder => holder.declaredElements
case named: ScNamedElement => Seq(named)
case _ => Seq.empty
}
case _ => clazz.getFields ++ clazz.getMethods
}
}
}
implicit class PsiNamedElementExt(val named: PsiNamedElement) extends AnyVal {
/**
* Second match branch is for Java only.
*/
def name: String = {
named match {
case nd: ScNamedElement => nd.name
case nd => nd.getName
}
}
}
implicit class PsiModifierListOwnerExt(val member: PsiModifierListOwner) extends AnyVal{
/**
* Second match branch is for Java only.
*/
def hasAbstractModifier: Boolean = {
member match {
case member: ScModifierListOwner => member.hasAbstractModifier
case _ => member.hasModifierProperty(PsiModifier.ABSTRACT)
}
}
/**
* Second match branch is for Java only.
*/
def hasFinalModifier: Boolean = {
member match {
case member: ScModifierListOwner => member.hasFinalModifier
case _ => member.hasModifierProperty(PsiModifier.FINAL)
}
}
/**
* Second match branch is for Java only.
*/
def hasModifierPropertyScala(name: String): Boolean = {
member match {
case member: ScModifierListOwner => member.hasModifierPropertyScala(name)
case _ => member.hasModifierProperty(name)
}
}
}
implicit class PipedObject[T](val value: T) extends AnyVal {
def |>[R](f: T => R) = f(value)
}
implicit class IteratorExt[A](val delegate: Iterator[A]) extends AnyVal {
def findByType[T](aClass: Class[T]): Option[T] =
delegate.find(aClass.isInstance(_)).map(_.asInstanceOf[T])
def filterByType[T](aClass: Class[T]): Iterator[T] =
delegate.filter(aClass.isInstance(_)).map(_.asInstanceOf[T])
def headOption: Option[A] = {
if (delegate.hasNext) Some(delegate.next())
else None
}
}
implicit class RegexExt(val regex: Regex) extends AnyVal {
def matches(s: String) = regex.pattern.matcher(s).matches
}
import scala.language.implicitConversions
implicit def toIdeaFunction[A, B](f: Function[A, B]): com.intellij.util.Function[A, B] = new com.intellij.util.Function[A, B] {
override def fun(param: A): B = f(param)
}
implicit def toProcessor[T](action: T => Boolean): Processor[T] = new Processor[T] {
override def process(t: T): Boolean = action(t)
}
implicit def toRunnable(action: => Any): Runnable = new Runnable {
override def run(): Unit = action
}
implicit def toComputable[T](action: => T): Computable[T] = new Computable[T] {
override def compute(): T = action
}
implicit def toCallable[T](action: => T): Callable[T] = new Callable[T] {
override def call(): T = action
}
def startCommand(project: Project, commandName: String)(body: => Unit): Unit = {
CommandProcessor.getInstance.executeCommand(project, new Runnable {
def run() {
inWriteAction {
body
}
}
}, commandName, null)
}
def inWriteAction[T](body: => T): T = {
val application = ApplicationManager.getApplication
if (application.isWriteAccessAllowed) body
else {
application.runWriteAction(
new Computable[T] {
def compute: T = body
}
)
}
}
def inWriteCommandAction[T](project: Project, commandName: String = "Undefined")(body: => T): T = {
val computable = new Computable[T] {
override def compute(): T = body
}
new WriteCommandAction[T](project, commandName) {
protected def run(@NotNull result: Result[T]) {
result.setResult(computable.compute())
}
}.execute.getResultObject
}
def inReadAction[T](body: => T): T = {
val application = ApplicationManager.getApplication
if (application.isReadAccessAllowed) body
else {
application.runReadAction(
new Computable[T] {
override def compute(): T = body
}
)
}
}
def executeOnPooledThread[T](body: => T): Future[T] = {
ApplicationManager.getApplication.executeOnPooledThread(toCallable(body))
}
def withProgressSynchronously[T](title: String)(body: ((String => Unit) => T)): T = {
withProgressSynchronouslyTry[T](title)(body) match {
case Success(result) => result
case Failure(exception) => throw exception
}
}
def withProgressSynchronouslyTry[T](title: String)(body: ((String => Unit) => T)): Try[T] = {
val progressManager = ProgressManager.getInstance
val computable = new ThrowableComputable[T, Exception] {
@throws(classOf[Exception])
def compute: T = {
val progressIndicator = progressManager.getProgressIndicator
body(progressIndicator.setText)
}
}
catching(classOf[Exception]).withTry {
progressManager.runProcessWithProgressSynchronously(computable, title, false, null)
}
}
def postponeFormattingWithin[T](project: Project)(body: => T): T = {
PostprocessReformattingAspect.getInstance(project).postponeFormattingInside(new Computable[T]{
def compute(): T = body
})
}
def withDisabledPostprocessFormatting[T](project: Project)(body: => T): T = {
PostprocessReformattingAspect.getInstance(project).disablePostprocessFormattingInside {
new Computable[T] {
override def compute(): T = body
}
}
}
def invokeLater[T](body: => T) {
ApplicationManager.getApplication.invokeLater(new Runnable {
def run() {
body
}
})
}
def invokeAndWait[T](body: => Unit) {
preservingControlFlow {
SwingUtilities.invokeAndWait(new Runnable {
def run() {
body
}
})
}
}
private def preservingControlFlow(body: => Unit) {
try {
body
} catch {
case e: InvocationTargetException => e.getTargetException match {
case control: NonLocalReturnControl[_] => throw control
case _ => throw e
}
}
}
/** Create a PartialFunction from a sequence of cases. Workaround for pattern matcher bug */
def pf[A, B](cases: PartialFunction[A, B]*) = new PartialFunction[A, B] {
def isDefinedAt(x: A): Boolean = cases.exists(_.isDefinedAt(x))
def apply(v1: A): B = {
val it = cases.iterator
while (it.hasNext) {
val caze = it.next()
if (caze.isDefinedAt(v1))
return caze(v1)
}
throw new MatchError(v1.toString)
}
}
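  // Hypothetical usage (not in the original source):
  //   val f = pf[Int, String]({ case 1 => "one" }, { case 2 => "two" })
  //   f.isDefinedAt(2) // true
  //   f(3)             // throws MatchError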
implicit class PsiParameterExt(val param: PsiParameter) extends AnyVal {
def paramType: ScType = {
param match {
case f: FakePsiParameter => f.parameter.paramType
case param: ScParameter => param.getType(TypingContext.empty).getOrAny
case _ => param.getType.toScType(param.getProject, param.getResolveScope, paramTopLevel = true)
}
}
def exactParamType(treatJavaObjectAsAny: Boolean = true): ScType = {
param match {
case f: FakePsiParameter => f.parameter.paramType
case param: ScParameter => param.getType(TypingContext.empty).getOrAny
case _ =>
val paramType = param.getType match {
case p: PsiArrayType if param.isVarArgs => p.getComponentType
case tp => tp
}
paramType.toScType(param.getProject, param.getResolveScope, paramTopLevel = true,
treatJavaObjectAsAny = treatJavaObjectAsAny)
}
}
def index: Int = {
param match {
case f: FakePsiParameter => f.parameter.index
case p: ScParameter => p.index
case _ =>
param.getParent match {
case pList: PsiParameterList => pList.getParameterIndex(param)
case _ => -1
}
}
}
}
def using[A <: Closeable, B](resource: A)(block: A => B): B = {
try {
block(resource)
} finally {
if (resource != null) resource.close()
}
}
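  // Hypothetical usage (not in the original source): the resource is closed in the finally block,
  // even if the body throws.
  //   using(new java.io.FileInputStream("some.file")) { in => in.read() }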
def using[B](source: Source)(block: Source => B): B = {
try {
block(source)
} finally {
source.close()
}
}
val ChildOf = Parent
} | whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/extensions/package.scala | Scala | apache-2.0 | 19,938 |
package io.sqooba.oss.timeseries.archive
import fi.iki.yak.ts.compression.gorilla._
import io.sqooba.oss.timeseries.immutable.TSEntry
import io.sqooba.oss.timeseries.validation.{TSEntryFitter, TimestampValidator}
import scala.util.Success
/** A GorillaBlock represents a compressed/encoded TimeSeries as defined in this
* library. It is the unit of compression/decompression for series data.
*/
trait GorillaBlock {
/** Decompress this Gorilla encoded timeseries block into a lazily evaluated
* stream of TSEntries.
*
* @return a stream of TSEntry[Double]
*/
def decompress: Stream[TSEntry[Double]]
/** @return the bytes for storing this block in binary format. This can be
* called multiple times.
*/
def serialize: Array[Byte]
}
/** Standard implementation of the GorillaBlock that has one GorillaArray
* for the values and one for the validities of the timeseries.
*
* @param valueBytes encodes the timeseries formed by the values along with their timestamps
* @param validityBytes encodes the series formed by the validities with their timestamps
*
* This implementation also defines a binary format for storing the two
* GorillaArrays. The values array is always preceding the validities array.
* Before the block there are 4 bytes that contain the length of the values
* array as an Int. This serves to split the block into the two arrays at
* decoding.
*/
case class TupleGorillaBlock private (
valueBytes: GorillaArray,
validityBytes: GorillaArray
) extends GorillaBlock {
require(valueBytes.nonEmpty, "Value GorillaArray cannot be empty.")
require(validityBytes.nonEmpty, "Validities GorillaArray cannot be empty.")
def serialize: Array[Byte] =
int2ByteArray(valueBytes.length) ++ valueBytes ++ validityBytes
def decompress: Stream[TSEntry[Double]] = {
// The underlying library throws IndexOutOfBounds, if something is not in
// the expected format. We wrap that in a Try to return a custom error.
val valueDecompressor = wrapTryDecompressor(valueBytes)
val validityDecompressor = wrapTryDecompressor(validityBytes)
// lazily generates the stream of entries, pair by pair
def nextEntry: Stream[TSEntry[Double]] =
(
valueDecompressor.map(_.readPair()),
validityDecompressor.map(_.readPair())
) match {
// both timeseries have a next entry with equal timestamps
case (Success(vPair: Pair), Success(dPair: Pair)) if vPair.getTimestamp == dPair.getTimestamp =>
TSEntry(vPair.getTimestamp, vPair.getDoubleValue, dPair.getLongValue) #:: nextEntry
// end of both compressed timeseries
case (Success(null), Success(null)) => Stream.empty
case _ =>
throw new IllegalArgumentException(
s"The byte arrays in this block are not a valid compressed timeseries."
)
}
nextEntry
}
}
/**
* GorillaBlock for series that have mostly similar validities. This can be
* stored more efficiently with a single GorillaArray. After decompression all
* entries will have the validity of the sample rate. This rate may include a
* bit of margin for jitter because individual validities will be trimmed at if
* they overlap once they are put into a 'TimeSeries'.
*
* @param valueBytes encodes the timeseries formed by the values along with their timestamps
* @param sampleRate the maximal validity of each entry in the series after decompression
*/
case class SampledGorillaBlock private (
valueBytes: GorillaArray,
sampleRate: Long
) extends GorillaBlock {
require(valueBytes.nonEmpty, "Value GorillaArray cannot be empty.")
require(sampleRate > 0, "Sampling rate must be positive.")
// the sample rate will be serialized by the GorillaSuperBlock
def serialize: Array[Byte] = valueBytes
def decompress: Stream[TSEntry[Double]] = {
// The underlying library throws IndexOutOfBounds, if something is not in
// the expected format. We wrap that in a Try to return a custom error.
val decompressor = wrapTryDecompressor(valueBytes)
// lazily generates the stream of entries, pair by pair
def nextEntry: Stream[TSEntry[Double]] =
decompressor.map(_.readPair()) match {
case Success(pair: Pair) =>
TSEntry(pair.getTimestamp, pair.getDoubleValue, sampleRate) #:: nextEntry
case Success(null) => Stream.empty
case _ =>
throw new IllegalArgumentException(
s"The byte array in this block isn't a valid compressed timeseries."
)
}
nextEntry
}
}
object GorillaBlock {
/** Create a GorillaBlock from a tuple of GorillaArrays.
*
* @param valueBytes encodes the timeseries formed by the values along with their timestamps
* @param validityBytes encodes the series formed by the validities with their timestamps
*/
def fromTupleArrays(valueBytes: GorillaArray, validityBytes: GorillaArray): GorillaBlock =
TupleGorillaBlock(valueBytes, validityBytes)
/** Create a GorillaBlock from a value GorillaArray and a sample rate.
*
* @param valueBytes encodes the timeseries formed by the values along with their timestamps
* @param sampleRate the constant validity of each entry in the series
*/
def fromSampled(valueBytes: GorillaArray, sampleRate: Long): GorillaBlock =
SampledGorillaBlock(valueBytes, sampleRate)
/** Deserialize a tuple GorillaBlock according to the binary format specified
* above and return it.
*/
def fromTupleSerialized(bytes: Array[Byte]): GorillaBlock = {
val (lengthBytes, arrayBytes) = bytes.splitAt(Integer.BYTES)
val (valueBytes, validityBytes) = arrayBytes.splitAt(byteArray2Int(lengthBytes))
TupleGorillaBlock(valueBytes, validityBytes)
}
/** Compress all the entries of the stream according to the Gorilla TSC format to
* two GorillaArrays and wrap them in a GorillaBlock.
*
* @note The entries need to be a well-formed series according to
* TSEntryFitter and TimestampValidator. Those constraints are checked
* and enforced.
*
* @param entries a non-empty stream of TSEntry[Double] to be compressed,
* other types of numbers need to be converted to doubles
* @return a gorilla encoded block
*/
def compress(entries: Stream[TSEntry[Double]]): GorillaBlock =
entries.foldLeft(GorillaBlock.newBuilder)(_ += _).result()
/** Compress all the entries of the stream according to the Gorilla TSC format to
* one GorillaArray and wrap it in a GorillaBlock. The sample rate will be written
* as well.
*
* @note The entries need to be a well-formed series according to
* TSEntryFitter and TimestampValidator. Those constraints are checked
* and enforced.
*
* @param entries a non-empty stream of TSEntry[Double] to be compressed,
* their validities will be discarded
* @param sampleRate the fixed validity for all the entries
* @return a gorilla encoded block
*/
def compressSampled(entries: Stream[TSEntry[Double]], sampleRate: Long): GorillaBlock =
entries.foldLeft(GorillaBlock.newBuilder(sampleRate))(_ += _).result()
/** A 'mutable.Builder' for the iterative construction of a GorillaBlock. The
* builder takes TSEntries and continually encodes them. This also compresses
* contiguous equal entries.
*/
def newBuilder: Builder = newBuilder(true)
/** A 'mutable.Builder' for the iterative construction of a GorillaBlock. The
* builder takes TSEntries and continually encodes them.
*/
def newBuilder(compress: Boolean): Builder = new Builder(None, compress)
/** A 'mutable.Builder' for the iterative construction of a GorillaBlock. The
* builder takes TSEntries and continually encodes them to a sampled
* GorillaBlock in which all entries have the validity of the sample rate.
* This also compresses contiguous equal entries.
*/
def newBuilder(fixedValidity: Long): Builder =
new Builder(Some(fixedValidity), compress = true)
/** A 'mutable.Builder' for the iterative construction of a GorillaBlock. The
* builder takes TSEntries and continually encodes them to a sampled
* GorillaBlock in which all entries have the validity of the sample rate.
*/
def newBuilder(fixedValidity: Long, compress: Boolean): Builder =
new Builder(Some(fixedValidity), compress)
/** A 'mutable.Builder' for the iterative construction of a GorillaBlock. */
class Builder private[GorillaBlock] (
validity: Option[Long],
compress: Boolean
) {
require(validity.forall(_ > 0), "Sampling rate must be positive.")
// These need be vars because the Java implementations don't provide clear() methods.
private var valueOutput: LongArrayOutput = _
private var valueCompressor: GorillaCompressor = _
private var validityOutput: LongArrayOutput = _
private var validityCompressor: GorillaCompressor = _
private val entryBuilder = new TSEntryFitter[Double](compress)
private var resultCalled = false
clear()
// Reset the builder to its initial state. The compressors must be set to null,
// because they rely on the first timestamp which is only available at the
// first addition of an element.
def clear(): Unit = {
valueOutput = new LongArrayOutput()
validityOutput = new LongArrayOutput()
valueCompressor = null
validityCompressor = null
entryBuilder.clear()
resultCalled = false
}
def +=(entry: TSEntry[Double]): this.type = addOne(entry)
def addOne(entry: TSEntry[Double]): this.type = {
// If this is the first element added, initialise the compressors with its timestamp.
if (lastEntry.isEmpty) {
// NOTE: Don't forget to validate the first timestamp, if a block timestamp
// other than the first entry's timestamp is used.
valueCompressor = new GorillaCompressor(entry.timestamp, valueOutput)
validityCompressor = new GorillaCompressor(entry.timestamp, validityOutput)
} else {
TimestampValidator.validateGorilla(lastEntry.get.timestamp, entry.timestamp)
}
entryBuilder.addAndFitLast(entry).foreach(compressEntry)
this
}
private def compressEntry(entry: TSEntry[Double]): Unit = {
valueCompressor.addValue(entry.timestamp, entry.value)
validityCompressor.addValue(entry.timestamp, entry.validity)
}
/** @return the last entry that was added to the fitter. This entry can still change
* if more entries are added (it might be compressed/trimmed).
*/
def lastEntry: Option[TSEntry[Double]] = entryBuilder.lastEntry
/** @return whether all added entries so far were either contiguous or overlapping.
* I.e. there were no holes in the domain of definition of the entries seen so far.
*/
def isDomainContinuous: Boolean = entryBuilder.isDomainContinuous
def result(): GorillaBlock = {
if (resultCalled) {
throw new IllegalStateException(
"Cannot call result more than once, unless the builder was cleared."
)
} else if (lastEntry.isEmpty) {
throw new IllegalStateException("Cannot call result if no element was added.")
}
resultCalled = true
entryBuilder.lastEntry.foreach(compressEntry)
valueCompressor.close()
validityCompressor.close()
      // Choose the sampled GorillaBlock implementation over the tuple one if
// a fixed validity is given.
validity match {
case Some(fixedValidity) =>
SampledGorillaBlock(
longArray2ByteArray(valueOutput.getLongArray),
fixedValidity
)
case None =>
TupleGorillaBlock(
longArray2ByteArray(valueOutput.getLongArray),
longArray2ByteArray(validityOutput.getLongArray)
)
}
}
}
}
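
/** Hypothetical usage sketch, not part of the original file. It round-trips a series through
  * the public builder API defined above (newBuilder / += / result / decompress) and the
  * compressSampled helper; TSEntry is already imported at the top of this file. Entries are
  * assumed to form a well-behaved series (increasing timestamps), as required by the builder.
  */
private[archive] object GorillaBlockUsageSketch {

  /** Compresses the entries into a tuple block, then lazily decodes them again. */
  def roundTrip(entries: Stream[TSEntry[Double]]): Stream[TSEntry[Double]] =
    entries.foldLeft(GorillaBlock.newBuilder)(_ += _).result().decompress

  /** Same idea with a fixed validity applied to every decoded entry (sampled variant). */
  def roundTripSampled(entries: Stream[TSEntry[Double]], rate: Long): Stream[TSEntry[Double]] =
    GorillaBlock.compressSampled(entries, rate).decompress
}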
| Shastick/tslib | src/main/scala/io/sqooba/oss/timeseries/archive/GorillaBlock.scala | Scala | mit | 11,976 |
package com.twitter.diffy.analysis
import javax.inject.Inject
import com.twitter.diffy.compare.{Difference, PrimitiveDifference}
import com.twitter.diffy.lifter.{JsonLifter, Message}
import com.twitter.diffy.thriftscala._
import com.twitter.finagle.tracing.Trace
import com.twitter.logging._
import com.twitter.util.{Future, Time}
import com.twitter.util.StorageUnitConversions._
import scala.util.Random
object DifferenceAnalyzer {
val NoControllerFound = Some("No_controller_reached")
val log = Logger(classOf[DifferenceAnalyzer])
log.setUseParentHandlers(false)
log.addHandler(
FileHandler(
filename = "differences.log",
rollPolicy = Policy.MaxSize(128.megabytes),
rotateCount = 2
)()
)
def normalizeEndpointName(name: String) = name.replace("/", "-")
}
case class Field(endpoint: String, prefix: String)
class DifferenceAnalyzer @Inject()(
rawCounter: RawDifferenceCounter,
noiseCounter: NoiseDifferenceCounter,
store: InMemoryDifferenceCollector)
{
import DifferenceAnalyzer._
def apply(
request: Message,
candidate: Message,
primary: Message,
secondary: Message
): Unit = {
getEndpointName(request.endpoint, candidate.endpoint,
primary.endpoint, secondary.endpoint) foreach { endpointName =>
// If there is no traceId then generate our own
val id = Trace.idOption map { _.traceId.toLong } getOrElse(Random.nextLong)
val rawDiff = Difference(primary, candidate).flattened
val noiseDiff = Difference(primary, secondary).flattened
rawCounter.counter.count(endpointName, rawDiff)
noiseCounter.counter.count(endpointName, noiseDiff)
if (rawDiff.size > 0) {
val diffResult = DifferenceResult(
id,
Trace.idOption map { _.traceId.toLong },
endpointName,
Time.now.inMillis,
differencesToJson(rawDiff),
JsonLifter.encode(request.result),
Responses(
candidate = JsonLifter.encode(candidate.result),
primary = JsonLifter.encode(primary.result),
secondary = JsonLifter.encode(secondary.result)
)
)
log.info(s"diff[$id]=$diffResult")
store.create(diffResult)
} else {
log.debug(s"diff[$id]=NoDifference")
}
}
}
def clear(): Future[Unit] = Future {
rawCounter.counter.clear()
noiseCounter.counter.clear()
store.clear()
}
def differencesToJson(diffs: Map[String, Difference]): Map[String, String] =
diffs map {
case (field, diff @ PrimitiveDifference(_: Long, _)) =>
field ->
JsonLifter.encode(
diff.toMap map {
case (k, v) => k -> v.toString
}
)
case (field, diff) => field -> JsonLifter.encode(diff.toMap)
}
private[this] def getEndpointName(
requestEndpoint: Option[String],
candidateEndpoint: Option[String],
primaryEndpoint: Option[String],
secondaryEndpoint: Option[String]): Option[String] = {
val rawEndpointName = (requestEndpoint, candidateEndpoint, primaryEndpoint, secondaryEndpoint) match {
case (Some(_), _, _, _) => requestEndpoint
// No controller reached when action header is missing from all three instances
case (_, None, None, None) => NoControllerFound
// the assumption is that primary and secondary should call the same endpoint,
// otherwise it's noise and we should discard the request
case (_, None, _, _) if primaryEndpoint == secondaryEndpoint => primaryEndpoint
case (_, None, _, _) => None
case (_, Some(candidate), _, _) => candidateEndpoint
}
rawEndpointName map { normalizeEndpointName(_) }
}
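  // Illustrative summary (not in the original source) of the resolution rules above:
  //   request endpoint present                      -> use the request endpoint
  //   candidate, primary and secondary all missing  -> NoControllerFound
  //   candidate missing, primary == secondary       -> use the primary endpoint
  //   candidate missing, primary != secondary       -> None (treated as noise, request discarded)
  //   candidate present                             -> use the candidate endpoint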
}
| 1c4r/diffy | src/main/scala/com/twitter/diffy/analysis/DifferenceCollector.scala | Scala | apache-2.0 | 3,727 |
package com.k2b6s9j.terrafirmacompanion
import cpw.mods.fml.common.Mod
import cpw.mods.fml.common.event.{FMLPostInitializationEvent, FMLInitializationEvent, FMLPreInitializationEvent}
import net.minecraftforge.common.config.Configuration
import org.apache.logging.log4j.Logger
@Mod(
modid = "TFCompanion",
name = "TerraFirmaCompanion",
modLanguage = "scala",
dependencies =
"required-after:terrafirmacraft;" +
"after:Botania")
object TerraFirmaCompanion {
private[terrafirmacompanion] var log: Logger = null
private[terrafirmacompanion] var config: Configuration = null
@Mod.EventHandler
def preInit(event: FMLPreInitializationEvent) = {
log = event.getModLog
printModInfo()
ModCompatibility.preInit()
}
def printModInfo() = {
log info "TerraFirmaCompanion"
log info "Copyright Kepler Sticka-Jones 2014"
log info "k2b6s9j.com/TerraFirmaCompanion"
}
@Mod.EventHandler
def init(event: FMLInitializationEvent) = {
ModCompatibility.init()
}
@Mod.EventHandler
def postInit(event: FMLPostInitializationEvent) = {
ModCompatibility.postInit()
}
}
| kepler0/TerraFirmaCompanion | src/main/scala/com/k2b6s9j/terrafirmacompanion/TerraFirmaCompanion.scala | Scala | mit | 1,121 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.mutable.Builder
import scala.language.higherKinds
/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys.
*
* Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in
* sorted order, according to the map's [[scala.math.Ordering]].
*
* @example {{{
* import scala.collection.immutable.SortedMap
*
* // Make a SortedMap via the companion object factory
* val weekdays = SortedMap(
* 2 -> "Monday",
* 3 -> "Tuesday",
* 4 -> "Wednesday",
* 5 -> "Thursday",
* 6 -> "Friday"
* )
* // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
*
* val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
* // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
*
* val day3 = days.get(3) // Some("Tuesday")
*
* val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
*
* val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
* val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday)
* val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
* }}}
*
* @tparam K the type of the keys contained in this tree map.
* @tparam V the type of the values associated with the keys.
*/
trait SortedMap[K, +V]
extends Map[K, V]
with collection.SortedMap[K, V]
with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] {
override def unsorted: Map[K, V] = this
override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
/** The same map with a given default function.
* Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
* are not affected by `withDefault`.
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d)
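  // Illustrative behaviour (not in the original source):
  //   val m = SortedMap(1 -> "a").withDefault(k => s"<$k>")
  //   m(2)     // "<2>"  -- only `apply` falls back to the default
  //   m.get(2) // None   -- `get`, `contains`, `iterator`, ... are unaffected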
/** The same map with a given default value.
* Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
* are not affected by `withDefaultValue`.
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
* @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d)
}
trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self =>
protected def coll: C with CC[K, V]
def unsorted: Map[K, V]
override def keySet: SortedSet[K] = new ImmutableKeySortedSet
/** The implementation class of the set returned by `keySet` */
protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet {
def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = {
val map = self.rangeImpl(from, until)
new map.ImmutableKeySortedSet
}
def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem)
def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem)
}
// We override these methods to fix their return type (which would be `Map` otherwise)
def updated[V1 >: V](key: K, value: V1): CC[K, V1]
@`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2)
override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = {
// Implementation has been copied from `MapOps`
val previousValue = this.get(key)
val nextValue = remappingFunction(previousValue)
(previousValue, nextValue) match {
case (None, None) => coll
case (Some(_), None) => this.removed(key).coll
case (_, Some(v)) => this.updated(key, v)
}
}
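  // Illustrative behaviour (not in the original source), mirroring the match above:
  //   SortedMap(1 -> "a").updatedWith(1)(_ => None)      // key 1 removed
  //   SortedMap(1 -> "a").updatedWith(2)(_ => Some("b")) // key 2 added
  //   SortedMap(1 -> "a").updatedWith(2)(_ => None)      // unchanged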
override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering)
}
trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
extends SortedMapOps[K, V, CC, C]
with collection.StrictOptimizedSortedMapOps[K, V, CC, C]
with StrictOptimizedMapOps[K, V, Map, C] {
override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = {
var result: CC[K, V2] = coll
val it = xs.iterator
while (it.hasNext) result = result + it.next()
result
}
}
@SerialVersionUID(3L)
object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) {
override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match {
case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm
case _ => super.from(it)
}
final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V)
extends Map.WithDefault[K, V](underlying, defaultValue)
with SortedMap[K, V]
with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable {
implicit def ordering: Ordering[K] = underlying.ordering
override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory
def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start)
def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start)
def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] =
new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue)
// Need to override following methods to match type signatures of `SortedMap.WithDefault`
// for operations preserving default value
override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] =
new WithDefault[K, V1](underlying.updated(key, value), defaultValue)
    override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] =
      new WithDefault(underlying.concat(xs), defaultValue)
override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue)
override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] =
new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue)
override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance =
SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue))
}
}
| martijnhoekstra/scala | src/library/scala/collection/immutable/SortedMap.scala | Scala | apache-2.0 | 7,481 |
/*
* Copyright © 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.bson
import java.util.Date
import java.util.regex.Pattern
import com.reactific.hsp.Profiler
import org.specs2.mutable.Specification
import rxmongo.bson.BinarySubtype._
/** Test Suite For BSON object */
class BSONSpec extends Specification with ByteStringTestUtils {
sequential
"BinarySubtype" should {
"yield correct values for objects" in {
GenericBinary.code must beEqualTo(0)
FunctionBinary.code must beEqualTo(1)
DeprecatedGenericBinary.code must beEqualTo(2)
DeprecatedUUIDBinary.code must beEqualTo(3)
UUIDBinary.code must beEqualTo(4)
MD5SumBinary.code must beEqualTo(5)
UserDefinedBinary.code must beEqualTo(-128)
}
"apply values correctly" in {
BinarySubtype(0) must beEqualTo(GenericBinary)
BinarySubtype(1) must beEqualTo(FunctionBinary)
BinarySubtype(2) must beEqualTo(DeprecatedGenericBinary)
BinarySubtype(3) must beEqualTo(DeprecatedUUIDBinary)
BinarySubtype(4) must beEqualTo(UUIDBinary)
BinarySubtype(5) must beEqualTo(MD5SumBinary)
BinarySubtype(6) must throwA[NoSuchElementException]
BinarySubtype(-128) must beEqualTo(UserDefinedBinary)
}
}
"TypeCode" should {
"yield correct values for objects" in {
NotACode.code must beEqualTo(0)
DoubleCode.code must beEqualTo(1)
StringCode.code must beEqualTo(2)
ObjectCode.code must beEqualTo(3)
ArrayCode.code must beEqualTo(4)
BinaryCode.code must beEqualTo(5)
UndefinedCode.code must beEqualTo(6)
ObjectIDCode.code must beEqualTo(7)
BooleanCode.code must beEqualTo(8)
DateCode.code must beEqualTo(9)
NullCode.code must beEqualTo(10)
RegexCode.code must beEqualTo(11)
DBPointerCode.code must beEqualTo(12)
JavaScriptCode.code must beEqualTo(13)
SymbolCode.code must beEqualTo(14)
ScopedJSCode.code must beEqualTo(15)
IntegerCode.code must beEqualTo(16)
TimestampCode.code must beEqualTo(17)
LongCode.code must beEqualTo(18)
MinKey.code must beEqualTo(-1)
MaxKey.code must beEqualTo(127)
}
"apply values correctly" in {
TypeCode(1.toByte) must beEqualTo(DoubleCode)
TypeCode(2.toByte) must beEqualTo(StringCode)
TypeCode(3.toByte) must beEqualTo(ObjectCode)
TypeCode(4.toByte) must beEqualTo(ArrayCode)
TypeCode(5.toByte) must beEqualTo(BinaryCode)
TypeCode(6.toByte) must beEqualTo(UndefinedCode)
TypeCode(7.toByte) must beEqualTo(ObjectIDCode)
TypeCode(8.toByte) must beEqualTo(BooleanCode)
TypeCode(9.toByte) must beEqualTo(DateCode)
TypeCode(10.toByte) must beEqualTo(NullCode)
TypeCode(11.toByte) must beEqualTo(RegexCode)
TypeCode(12.toByte) must beEqualTo(DBPointerCode)
TypeCode(13.toByte) must beEqualTo(JavaScriptCode)
TypeCode(14.toByte) must beEqualTo(SymbolCode)
TypeCode(15.toByte) must beEqualTo(ScopedJSCode)
TypeCode(16.toByte) must beEqualTo(IntegerCode)
TypeCode(17.toByte) must beEqualTo(TimestampCode)
TypeCode(18.toByte) must beEqualTo(LongCode)
TypeCode(-1.toByte) must beEqualTo(MinKey)
TypeCode(127.toByte) must beEqualTo(MaxKey)
TypeCode(19.toByte) must throwA[NoSuchElementException]
TypeCode(0.toByte) must throwA[NoSuchElementException]
}
}
val ALL_FLAGS : Int = Pattern.CASE_INSENSITIVE |
Pattern.MULTILINE |
Pattern.DOTALL |
Pattern.UNICODE_CHARACTER_CLASS |
Pattern.UNICODE_CASE |
Pattern.COMMENTS
"BSON" should {
"build and interpret reflectively" in {
val startime = System.nanoTime()
val bsonObject = makeObject()
val endtime = System.nanoTime
val constructiontime = endtime - startime
if (suitableForTimingTests)
constructiontime must beLessThan(200000000L)
val double = bsonObject.get("double")
val string = bsonObject.get("string")
val obj = bsonObject.get("obj")
val array = bsonObject.get("array")
val binary = bsonObject.get("binary")
val undefined = bsonObject.get("undefined")
val objectid = bsonObject.get("objectid")
val boolean = bsonObject.get("boolean")
val date = bsonObject.get("date")
val nil = bsonObject.get("null")
val regex = bsonObject.get("regex")
val dbpointer = bsonObject.get("dbpointer")
val jscode = bsonObject.get("jscode")
val symbol = bsonObject.get("symbol")
val scopedJsCode = bsonObject.get("scopedjscode")
val integer = bsonObject.get("integer")
val timestamp = bsonObject.get("timestamp")
val long = bsonObject.get("long")
double.isDefined must beTrue
string.isDefined must beTrue
obj.isDefined must beTrue
array.isDefined must beTrue
binary.isDefined must beTrue
undefined.isDefined must beTrue
objectid.isDefined must beTrue
boolean.isDefined must beTrue
date.isDefined must beTrue
nil.isDefined must beTrue
regex.isDefined must beTrue
dbpointer.isDefined must beTrue
jscode.isDefined must beTrue
symbol.isDefined must beTrue
scopedJsCode.isDefined must beTrue
integer.isDefined must beTrue
timestamp.isDefined must beTrue
long.isDefined must beTrue
double.get.value must beEqualTo(42.0D)
string.get.value must beEqualTo("fourty-two")
obj.get.value.asInstanceOf[Map[String, BSONValue]] must beEqualTo(
Map("one" -> BSONDouble(84.0D), "two" -> BSONString("eighty-four")))
array.get.asInstanceOf[BSONArray].seq must beEqualTo(
Seq(BSONDouble(42.0D), BSONDouble(84.0D)))
val pair = binary.get.value.asInstanceOf[(BinarySubtype, Array[Byte])]
pair._1 must beEqualTo(UserDefinedBinary)
pair._2 must beEqualTo(data)
undefined.get.value.asInstanceOf[Unit] must beEqualTo({})
objectid.get.value.asInstanceOf[Array[Byte]] must beEqualTo(data)
boolean.get.value.asInstanceOf[Boolean] must beEqualTo(true)
date.get.value.asInstanceOf[Date].getTime must beLessThan(System.currentTimeMillis)
nil.get.value.asInstanceOf[Unit] must beEqualTo({})
val r = regex.get.value.asInstanceOf[Pattern]
r.pattern must beEqualTo("pattern")
r.flags() must beEqualTo(ALL_FLAGS)
val (referent, objid) = dbpointer.get.value.asInstanceOf[(String, Array[Byte])]
referent must beEqualTo("referent")
objid must beEqualTo(data)
jscode.get.value.asInstanceOf[String] must beEqualTo("function(x) { return x + 1; };")
symbol.get.value.asInstanceOf[String] must beEqualTo("symbol")
val (code, scope) = scopedJsCode.get.value.asInstanceOf[(String, BSONObject)]
code must beEqualTo("function(x)")
scope must beEqualTo(anObject)
integer.get.value.asInstanceOf[Int] must beEqualTo(42)
timestamp.get.value.asInstanceOf[Long] must beEqualTo(42L)
long.get.value.asInstanceOf[Long] must beEqualTo(42L)
}
}
"BSONBuilder" should {
"warm up the JIT compiler" in {
val warmpup = makeObject(2, 10)
success
}
"build a tree of 2^12 (8,192) objects of 18 fields, quickly" in {
timedAndCountedTests("TreeTop",
Map("makeAnObject" -> (8191L,75000.0*8191), "TreeTop" -> (1L, 6500000000.0) )) { profiler =>
makeObject(2, 12, profiler)
}
success
}
"build and compact 100,000 objects of 18 fields, quickly" in {
timedAndCountedTests("ListTop", Map("makeAnObject" -> (100000L, 100000*20000.0 ))) { profiler =>
makeAnObject(profiler)
}
success
}
}
"BSONObject" should {
"construct from a variety of Any values" in {
val date = new Date()
val regex = Pattern.compile("pattern", ALL_FLAGS)
val map = Map("foo" -> 84, "bar" -> true, "roo" -> "fourty-two")
val b = BSONObject(
"double" -> 42.0D,
"string" -> "fourty-two",
"obj" -> anObject,
"array" -> anArray,
"map" -> map,
"binary" -> data,
"null" → BSONNull,
"undefined" -> BSONUndefined,
"boolean" -> true,
"date" -> date,
"regex" -> regex,
"integer" -> 42,
"long" -> 42L
)
b.get("double") must beEqualTo(Some(BSONDouble(42.0D)))
b.get("string") must beEqualTo(Some(BSONString("fourty-two")))
b.get("obj") must beEqualTo(Some(anObject))
b.get("array") must beEqualTo(Some(anArrayBSON))
b.get("map") must beEqualTo(Some(BSONObject(map)))
b.get("binary") must beEqualTo(Some(BSONBinary(data, UserDefinedBinary)))
b.get("null") must beEqualTo(Some(BSONNull))
b.get("undefined") must beEqualTo(Some(BSONUndefined))
b.get("boolean") must beEqualTo(Some(BSONBoolean(value = true)))
b.get("date") must beEqualTo(Some(BSONDate(date)))
b.get("regex") must beEqualTo(Some(BSONRegex(regex)))
b.get("integer") must beEqualTo(Some(BSONInteger(42)))
b.get("long") must beEqualTo(Some(BSONLong(42L)))
}
}
}
| reactific/RxMongo | bson/src/test/scala/rxmongo/bson/BSONSpec.scala | Scala | mit | 10,175 |
/**
* Copyright 2015 Gianluca Amato <[email protected]>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.fixpoint
import org.scalatest.FunSpec
import org.scalatest.prop.PropertyChecks
import org.scalacheck.Gen
import org.scalacheck.Arbitrary
import scala.collection.immutable.HashMap
/**
* Test solvers for finite equation systems.
*/
class FiniteEquationSystemTest extends FunSpec with PropertyChecks {
import IterativeStrategy._
object simpleEqs extends FiniteEquationSystem {
type Unknown = Int
type Value = Double
def apply(rho: Assignment): Assignment = {
case 0 => rho(0)
case 1 => (rho(0) max rho(2)) min rho(3)
case 2 => rho(1) + 1
case 3 => rho(3)
}
val unknowns = Seq(0, 1, 2, 3)
def infl(x: Int) = x match {
case 0 => Seq(0, 1, 2)
case 1 => Seq(2)
case 2 => Seq(1)
case 3 => Seq(1, 3)
}
}
val simpleEqsStrategy = IterativeStrategy[Int](Left, El(0), Left, El(1), El(2), El(3), Right, Right)
val wideningBox: simpleEqs.Box = { (x1, x2) => if (x2 > x1) Double.PositiveInfinity else x1 }
val maxBox: simpleEqs.Box = { _ max _ }
val lastBox: simpleEqs.Box = { (x1, x2) => x2 }
val allMax: Int => simpleEqs.Box = { _ => maxBox }
val allWiden: Int => simpleEqs.Box = { _ => wideningBox }
val allLast: Int => simpleEqs.Box = { _ => lastBox }
val startRho: Int => Double = { (x: Int) => if (x == 3) 10.0 else 0.0 }
/**
* Tests whether solving `eqs` equation system always returns a correct result. Should be used only for solvers which are
* guaranteed to terminate with the given box assignment.
*/
def testCorrectness(solver: FixpointSolver[_ <: FiniteEquationSystem])(boxes: solver.eqs.Unknown => solver.eqs.Box)(implicit values: Arbitrary[solver.eqs.Value]) = {
val startRhosList = Gen.listOfN(solver.eqs.unknowns.size, values.arbitrary)
val startRhos = startRhosList map { (l) => HashMap(solver.eqs.unknowns zip l: _*) }
it("always returns a box solution") {
forAll(startRhos) { startEnv =>
val finalEnv = solver(startEnv, boxes)
for (x <- solver.eqs.unknowns)
assert(finalEnv(x) === boxes(x)(finalEnv(x), solver.eqs(finalEnv)(x)))
}
}
}
/**
* Test solvers for the `simpleEqs` equation system when starting from the initial
* assignment `startRho`.
*/
def testExpectedResult(solver: FixpointSolver[simpleEqs.type]) = {
describe(s"${solver.name} with last") {
it("gives the expected result starting from startRho") {
val finalRho = solver(startRho, allLast)
assert(finalRho(0) === 0.0)
assert(finalRho(1) === 10.0)
assert(finalRho(2) === 11.0)
assert(finalRho(3) === 10.0)
}
}
describe(s"${solver.name} with max") {
it("gives the expected result starting from startRho") {
val finalRho = solver(startRho, allMax)
assert(finalRho(0) === 0.0)
assert(finalRho(1) === 10.0)
assert(finalRho(2) === 11.0)
assert(finalRho(3) === 10.0)
}
}
describe(s"${solver.name} with widening") {
it("gives the expected result starting from startRho") {
val finalRho = solver(startRho, allWiden)
assert(finalRho(0) === 0.0)
assert(finalRho(1) === Double.PositiveInfinity)
assert(finalRho(2) === Double.PositiveInfinity)
assert(finalRho(3) === 10.0)
}
}
describe(s"${solver.name} with widening") {
testCorrectness(solver)(allWiden)
}
}
testExpectedResult(RoundRobinSolver(simpleEqs))
testExpectedResult(WorkListSolver(simpleEqs))
testExpectedResult(IterativeStrategySolver(simpleEqs)(simpleEqsStrategy))
}
| francescaScozzari/Jandom | core/src/test/scala/it/unich/jandom/fixpoint/FiniteEquationSystemTest.scala | Scala | lgpl-3.0 | 4,372 |
/*
* Copyright 2009-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package builtin {
package snippet {
import _root_.scala.xml._
import _root_.net.liftweb.http._
object Tail extends DispatchSnippet {
def dispatch: DispatchIt = {
case _ => render _
}
def render(xhtml: NodeSeq) : NodeSeq = <tail>xhtml</tail>
}
}
}
}
| jeppenejsum/liftweb | framework/lift-base/lift-webkit/src/main/scala/net/liftweb/builtin/snippet/Tail.scala | Scala | apache-2.0 | 908 |
package dotty.partest
import dotty.tools.dotc.reporting.ConsoleReporter
import scala.tools.partest.{ TestState, nest }
import java.io.{ File, PrintWriter, FileWriter }
/* NOTE: Adapted from partest.DirectCompiler and DottyTest */
class DPDirectCompiler(runner: DPTestRunner) extends nest.DirectCompiler(runner) {
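  // Compiles the given sources with a fresh dotty compiler Context; compiler output is written
  // to the per-test compilation log and the outcome is returned as a pass, fail or crash TestState.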
override def compile(opts0: List[String], sources: List[File]): TestState = {
val clogFWriter = new FileWriter(runner.cLogFile.jfile, true)
val clogWriter = new PrintWriter(clogFWriter, true)
clogWriter.println("\\ncompiling " + sources.mkString(" ") + "\\noptions: " + opts0.mkString(" "))
implicit var ctx: dotty.tools.dotc.core.Contexts.Context = {
val base = new dotty.tools.dotc.core.Contexts.ContextBase
import base.settings._
val ctx = base.initialCtx.fresh.setSetting(printtypes, true)
.setSetting(pageWidth, 90).setSetting(log, List("<some"))
base.definitions.init(ctx)
ctx
}
try {
val processor = if (opts0.exists(_.startsWith("#"))) dotty.tools.dotc.Bench else dotty.tools.dotc.Main
val clogger = new ConsoleReporter(writer = clogWriter)(ctx)
val reporter = processor.process((sources.map(_.toString) ::: opts0).toArray, ctx, Some(clogger))
if (!reporter.hasErrors) runner.genPass()
else {
reporter.printSummary(ctx)
runner.genFail(s"compilation failed with ${reporter.errorCount} errors")
}
} catch {
case t: Throwable => runner.genCrash(t)
} finally {
clogFWriter.close
clogWriter.close
}
}
}
| spetz911/dotty | test/dotty/partest/DPDirectCompiler.scala | Scala | bsd-3-clause | 1,571 |
package se.betygen.update.config
import akka.actor.ActorContext
import akka.util.Timeout
import se.betygen.update.config.DefaultConfig.Actors.defaultTimeout
import scala.concurrent.ExecutionContext
import scala.language.postfixOps
trait ActorDefaults {
val context: ActorContext
implicit val timeout = Timeout(defaultTimeout)
implicit val executionContext: ExecutionContext =
context.system.dispatchers.defaultGlobalDispatcher
}
 | betygen/update | src/main/scala/se/betygen/update/config/ActorDefaults.scala | Scala | mit | 441 |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.execution
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.expressions.Unevaluable
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.expressions.AttributeSet
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.types.DataType
case class NativeUDF(name: String,
dataType: DataType,
children: Seq[Expression]) extends Expression with Unevaluable {
override def toString: String = s"NativeUDF#$name(${children.mkString(",")})"
override def nullable: Boolean = true
}
case class EvaluateNativeUDF(udf: NativeUDF,
child: LogicalPlan,
resultAttribute: Attribute) extends logical.UnaryNode {
def output: Seq[Attribute] = child.output :+ resultAttribute
// References should not include the produced attribute.
override def references: AttributeSet = udf.references
}
object EvaluateNativeUDF {
def apply(udf: NativeUDF, child: LogicalPlan): EvaluateNativeUDF =
new EvaluateNativeUDF(udf, child, AttributeReference(udf.name, udf.dataType, false)())
}
// case class NativeUDFEvaluation(udf: NativeUDF, output: Seq[Attribute], child: SparkPlan) extends SparkPlan
/*
*
 * Analysis rule that replaces resolved NativeUDFs with their evaluations (EvaluateNativeUDF
 * logical plan nodes). These evaluations carry the information needed to reference the UDF in
 * the native connector's query generator.
*
*/
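// Illustrative sketch (not part of the original sources) of the rewrite performed below, for a
// filter over a resolved NativeUDF `f` applied to an attribute `a` of `child`:
//
//   Filter(f(a) > 1, child)
//     ==>
//   Project(child.output,
//     Filter(fResult > 1, EvaluateNativeUDF(f, child, fResult)))
//
// where `fResult` is the AttributeReference produced by EvaluateNativeUDF for the UDF's value.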
object ExtractNativeUDFs extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case plan: EvaluateNativeUDF => plan
case plan: LogicalPlan =>
plan.expressions.
flatMap(_.collect {case udf: NativeUDF => udf} ).
find(_.resolved).
map { case udf =>
var evaluation: EvaluateNativeUDF = null
val newChildren = plan.children flatMap { child =>
// Check to make sure that the UDF can be evaluated with only the input of this child.
// Other cases are disallowed as they are ambiguous or would require a cartesian
// product.
if (udf.references.subsetOf(child.outputSet)) {
evaluation = EvaluateNativeUDF(udf, child)
evaluation::Nil
} else if (udf.references.intersect(child.outputSet).nonEmpty) {
sys.error(s"Invalid NativeUDF $udf, requires attributes from more than one child.")
} else {
child::Nil
}
}
assert(evaluation != null, "Unable to evaluate NativeUDF. Missing input attributes.")
logical.Project(
plan.output, //plan.withNewChildren(newChildren)
plan.transformExpressions {
case u: NativeUDF if(u.fastEquals(udf)) => evaluation.resultAttribute
}.withNewChildren(newChildren)
)
} getOrElse plan
}
} | luismcl/crossdata | core/src/main/scala/org/apache/spark/sql/crossdata/execution/nativeudfs.scala | Scala | apache-2.0 | 3,862 |
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt.IO
import scala.io.Source
import java.io.{File, FileWriter, BufferedWriter}
object GenScalaTestDotty {
private def uncommentJsExport(line: String): String =
if (line.trim.startsWith("//DOTTY-ONLY "))
line.substring(line.indexOf("//DOTTY-ONLY ") + 13)
else if (line.trim.startsWith("//DOTTY-ONLY "))
line.substring(line.indexOf("//DOTTY-ONLY ") + 13)
else
line
private def transformLine(line: String): String =
uncommentJsExport(line)
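  // Copies a single source file for the Dotty build: lines between "// SKIP-DOTTY-START" and
  // "// SKIP-DOTTY-END" are dropped and lines prefixed with "//DOTTY-ONLY " are uncommented.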
private def copyFile(sourceFile: File, destFile: File): File = {
val destWriter = new BufferedWriter(new FileWriter(destFile))
try {
val lines = Source.fromFile(sourceFile).getLines.toList
var skipMode = false
for (line <- lines) {
if (line.trim == "// SKIP-DOTTY-START" || line.trim == "// SKIP-DOTTY-START")
skipMode = true
else if (line.trim == "// SKIP-DOTTY-END" || line.trim == "// SKIP-DOTTY-END")
skipMode = false
else if (!skipMode) {
destWriter.write(transformLine(line))
destWriter.newLine()
}
}
destFile
}
finally {
destWriter.flush()
destWriter.close()
println("Copied " + destFile.getAbsolutePath)
}
}
private def uncommentJsExportJS(line: String): String =
if (line.trim.startsWith("//DOTTY-ONLY "))
line.substring(line.indexOf("//DOTTY-ONLY ") + 13)
else if (line.trim.startsWith("//DOTTY-ONLY "))
line.substring(line.indexOf("//DOTTY-ONLY ") + 13)
else if (line.trim.startsWith("//SCALATESTJS,NATIVE-ONLY "))
line.substring(line.indexOf("//SCALATESTJS,NATIVE-ONLY ") + 26)
else if (line.trim.startsWith("//SCALATESTJS-ONLY "))
line.substring(line.indexOf("//SCALATESTJS-ONLY ") + 19)
else
line
private def transformLineJS(line: String): String =
uncommentJsExportJS(line)
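  // Scala.js flavour of copyFile: in addition to the DOTTY markers it drops the
  // SKIP-SCALATESTJS(,NATIVE) sections and uncomments //SCALATESTJS-ONLY and
  // //SCALATESTJS,NATIVE-ONLY lines.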
private def copyFileJS(sourceFile: File, destFile: File): File = {
val destWriter = new BufferedWriter(new FileWriter(destFile))
try {
val lines = Source.fromFile(sourceFile).getLines.toList
var skipDottyMode = false
var skipJSMode = false
for (line <- lines) {
if (!skipJSMode && line.trim == "// SKIP-DOTTY-START")
skipDottyMode = true
else if (!skipJSMode && line.trim == "// SKIP-DOTTY-END")
skipDottyMode = false
else if (!skipDottyMode && (line.trim == "// SKIP-SCALATESTJS,NATIVE-START" || line.trim == "// SKIP-SCALATESTJS-START"))
skipJSMode = true
else if (!skipDottyMode && (line.trim == "// SKIP-SCALATESTJS,NATIVE-END" || line.trim == "// SKIP-SCALATESTJS-END"))
skipJSMode = false
else if (!skipDottyMode && !skipJSMode) {
destWriter.write(transformLineJS(line))
destWriter.newLine()
}
}
destFile
}
finally {
destWriter.flush()
destWriter.close()
println("Copied " + destFile.getAbsolutePath)
}
}
def copyFiles(sourceDirName: String, packageDirName: String, targetDir: File, files: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
files.map { sourceFileName =>
val sourceFile = new File(sourceDir, sourceFileName)
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
copyFile(sourceFile, destFile)
destFile
}
}
def copyStartsWithFiles(sourceDirName: String, packageDirName: String, startsWith: String, targetDir: File): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && f.getName.startsWith(startsWith) && f.getName.endsWith(".scala")).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
copyFile(sourceFile, destFile)
destFile
}
}
def copyDir(sourceDirName: String, packageDirName: String, targetDir: File, skipList: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && !skipList.contains(f.getName) && (f.getName.endsWith(".scala") || f.getName.endsWith(".java"))).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
copyFile(sourceFile, destFile)
destFile
}
}
def copyDirJS(sourceDirName: String, packageDirName: String, targetDir: File, skipList: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && !skipList.contains(f.getName) && (f.getName.endsWith(".scala") || f.getName.endsWith(".java"))).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
copyFileJS(sourceFile, destFile)
destFile
}
}
def copyResourceDir(sourceDirName: String, packageDirName: String, targetDir: File, skipList: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && !skipList.contains(f.getName)).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
IO.copyFile(sourceFile, destFile)
destFile
}
}
def genJava(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
copyFiles("jvm/core/src/main/java/org/scalatest", "org/scalatest", targetDir,
List(
"Finders.java",
"TagAnnotation.java",
"WrapWith.java",
"DoNotDiscover.java",
"Ignore.java"
)) ++
copyDir("jvm/core/src/main/java/org/scalatest/tags", "org/scalatest/tags", targetDir, List.empty)
}
def genHtml(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
copyResourceDir("scalatest-doc/src/main/html", "html", targetDir, List.empty) ++
copyResourceDir("jvm/core/src/main/resources/images", "images", targetDir, List.empty) ++
copyResourceDir("jvm/core/src/main/resources/org/scalatest", "org/scalatest", targetDir, List.empty)
}
val genScalaPackages: Map[String, List[String]] =
Map(
"org/scalatest" -> List(
"Assertions.scala", // Re-implemented
"AssertionsMacro.scala", // Re-implemented
"CompileMacro.scala", // Re-implemented
"DiagrammedAssertions.scala", // Re-implemented
"DiagrammedAssertionsMacro.scala", // Re-implemented
"DiagrammedExprMacro.scala", // Re-implemented
"DiagrammedExpr.scala", // Re-implemented
"Expectations.scala", // Re-implemented
"ExpectationsMacro.scala", // Re-implemented
"Inspectors.scala", // Re-implemented without path-dependent type
),
"org/scalatest/concurrent" -> List.empty,
"org/scalatest/diagrams" -> List(
"Diagrams.scala",
"DiagramsMacro.scala",
"DiagrammedExprMacro.scala", // Re-implemented
"DiagrammedExpr.scala", // Re-implemented
),
"org/scalatest/exceptions" -> List.empty,
"org/scalatest/enablers" -> List(
"InspectorAsserting.scala" // Re-implemented without path-dependent type
),
"org/scalatest/events" -> List.empty,
"org/scalatest/fixture" -> List.empty,
"org/scalatest/featurespec" -> List.empty,
"org/scalatest/funspec" -> List.empty,
"org/scalatest/funsuite" -> List.empty,
"org/scalatest/freespec" -> List.empty,
"org/scalatest/flatspec" -> List.empty,
"org/scalatest/matchers" -> List(
"Matcher.scala", // Re-implemented with new macro
"MatchPatternMacro.scala", // Re-implemented with new macro
"TypeMatcherMacro.scala" // Re-implemented with new macro
),
"org/scalatest/matchers/dsl" -> List(
"BeWord.scala",
"JavaCollectionWrapper.scala",
"JavaMapWrapper.scala",
"MatchPatternWord.scala",
"NotWord.scala",
"ResultOfNotWordForAny.scala"
),
"org/scalatest/expectations" -> List.empty,
"org/scalatest/matchers/should" -> List.empty,
"org/scalatest/path" -> List.empty,
"org/scalatest/prop" -> List.empty,
"org/scalatest/propspec" -> List.empty,
"org/scalatest/tagobjects" -> List.empty,
"org/scalatest/time" -> List.empty,
"org/scalatest/verbs" -> List.empty,
"org/scalatest/tools" -> List.empty,
"org/scalatest/refspec" -> List.empty,
"org/scalatest/words" -> List.empty,
"org/scalatest/wordspec" -> List.empty
)
val genScalaPackagesJS: Map[String, List[String]] =
Map(
"org/scalatest" -> List(
"Assertions.scala", // Re-implemented
"AssertionsMacro.scala", // Re-implemented
"CompileMacro.scala", // Re-implemented
"DiagrammedAssertions.scala", // Re-implemented
"DiagrammedAssertionsMacro.scala", // Re-implemented
"DiagrammedExprMacro.scala", // Re-implemented
"DiagrammedExpr.scala", // Re-implemented
"Expectations.scala", // Re-implemented
"ExpectationsMacro.scala", // Re-implemented
"Inspectors.scala", // Re-implemented without path-dependent type
"Shell.scala", // Not supported on scala-js
"run.scala", // Not supported on scala-js
"SuiteRerunner.scala", // Not supported on scala-js
"JavaClassesWrappers.scala", // Re-implemented in scala-js
"DispatchReporter.scala" // Not supported on scala-js
),
"org/scalatest/concurrent" -> List(
"SleepHelper.scala"
),
"org/scalatest/diagrams" -> List(
"Diagrams.scala",
"DiagramsMacro.scala"
),
"org/scalatest/exceptions" -> List.empty,
"org/scalatest/enablers" -> List(
"InspectorAsserting.scala" // Re-implemented without path-dependent type
),
"org/scalatest/events" -> List.empty,
"org/scalatest/fixture" -> List.empty,
"org/scalatest/featurespec" -> List.empty,
"org/scalatest/funspec" -> List.empty,
"org/scalatest/funsuite" -> List.empty,
"org/scalatest/freespec" -> List.empty,
"org/scalatest/flatspec" -> List.empty,
"org/scalatest/matchers" -> List(
"Matcher.scala", // Re-implemented with new macro
"MatchPatternMacro.scala", // Re-implemented with new macro
"TypeMatcherMacro.scala" // Re-implemented with new macro
),
"org/scalatest/matchers/dsl" -> List(
"BeWord.scala",
"JavaCollectionWrapper.scala",
"JavaMapWrapper.scala",
"MatchPatternWord.scala",
"NotWord.scala",
"ResultOfNotWordForAny.scala"
),
"org/scalatest/expectations" -> List.empty,
"org/scalatest/matchers/should" -> List.empty,
"org/scalatest/path" -> List.empty,
"org/scalatest/prop" -> List.empty,
"org/scalatest/propspec" -> List.empty,
"org/scalatest/tagobjects" -> List.empty,
"org/scalatest/time" -> List.empty,
"org/scalatest/verbs" -> List.empty,
"org/scalatest/tools" -> List.empty,
"org/scalatest/refspec" -> List.empty,
"org/scalatest/words" -> List.empty,
"org/scalatest/wordspec" -> List.empty
)
/*def genScala(targetDir: File, version: String, scalaVersion: String): Seq[File] =
genScalaPackages.filter(_._1 != "org/scalatest/tools").flatMap { case (packagePath, skipList) =>
copyDir("scalatest/src/main/scala/org/scalatest/tools" + packagePath, packagePath, targetDir, skipList)
}.toList*/
def genScalaJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("dotty/core/src/main/scala/org/scalatest", "org/scalatest", targetDir, List.empty) ++
copyDir("dotty/core/src/main/scala/org/scalatest/enablers", "org/scalatest/enablers", targetDir, List.empty) ++
copyDir("dotty/core/src/main/scala/org/scalatest/expectations", "org/scalatest/expectations", targetDir, List.empty) ++
copyDir("js/core/src/main/scala/org/scalatest/compatible", "org/scalatest/compatible", targetDir, List.empty) ++
copyDir("js/core/src/main/scala/org/scalatest/concurrent", "org/scalatest/concurrent", targetDir, List.empty) ++
copyDir("js/core/src/main/scala/org/scalatest/tools", "org/scalatest/tools", targetDir, List.empty) ++
copyDir("js/core/src/main/scala/org/scalatest", "org/scalatest", targetDir, List.empty) ++
copyDirJS("jvm/core/src/main/scala/org/scalatest/tools", "org/scalatest/tools", targetDir,
List(
"AboutJDialog.scala",
//"AnsiColor.scala",
"AnsiReset.scala",
"ColorBar.scala",
"DashboardReporter.scala",
"DiscoverySuite.scala",
"Durations.scala",
"EventHolder.scala",
"EventToPresent.scala",
"EventHolderDefaultListModel.scala",
"EventHolderListCellRenderer.scala",
"FileReporter.scala",
"FilterReporter.scala",
"Framework.scala",
"FriendlyParamsTranslator.scala",
"HtmlReporter.scala",
"IconEmbellishedListCellRenderer.scala",
"JUnitXmlReporter.scala",
"Memento.scala",
"MemoryReporter.scala",
"NarrowJOptionPane.scala",
"NestedSuiteParam.scala",
//"ParsedArgs.scala",
"PrintReporter.scala",
"ProgressBarPanel.scala",
//"ReporterConfigParam.scala",
"ReporterConfiguration.scala",
"ReporterFactory.scala",
"RunDoneListener.scala",
"Runner.scala",
"RunnerGUI.scala",
"RunnerGUIState.scala",
"RunnerJFrame.scala",
"SbtCommandParser.scala",
"SbtDispatchReporter.scala",
"ScalaTestAntTask.scala",
"ScalaTestFramework.scala",
"SocketReporter.scala",
"StandardErrReporter.scala",
"StandardOutReporter.scala",
"StatusJPanel.scala",
"SuiteDiscoveryHelper.scala",
"SuiteParam.scala",
"SuiteResult.scala",
"SuiteResultHolder.scala",
//"SuiteRunner.scala",
"TestSpec.scala",
"XmlReporter.scala",
"XmlSocketReporter.scala"
))
def genMatchersCoreScalaJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("dotty/matchers-core/src/main/scala/org/scalatest/matchers", "org/scalatest/matchers", targetDir, List.empty) ++
copyDirJS("dotty/matchers-core/src/main/scala/org/scalatest/matchers/dsl", "org/scalatest/matchers/dsl", targetDir, List.empty)
def genShouldMatchersScalaJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("dotty/shouldmatchers/src/main/scala/org/scalatest/matchers/should", "org/scalatest/matchers/should", targetDir, List.empty)
def genMustMatchersScalaJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("dotty/mustmatchers/src/main/scala/org/scalatest/matchers/must", "org/scalatest/matchers/must", targetDir, List.empty)
def genDiagramsScalaJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("dotty/diagrams/src/main/scala/org/scalatest", "org/scalatest", targetDir, List.empty) ++
copyDirJS("dotty/diagrams/src/main/scala/org/scalatest/diagrams", "org/scalatest/diagrams", targetDir, List.empty)
def genTest(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest", "org/scalatest", targetDir,
List(
"SeveredStackTracesFailureSpec.scala", // skipped because tests failed
"SeveredStackTracesSpec.scala", // skipped because tests failed
)
) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/expectations", "org/scalatest/expectations", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/concurrent", "org/scalatest/concurrent", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/enablers", "org/scalatest/enablers", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/events/examples", "org/scalatest/events/examples", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/events", "org/scalatest/events", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/exceptions", "org/scalatest/exceptions", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/fixture", "org/scalatest/fixture", targetDir,
List(
"NoArgSpec.scala", // skipped because tests failed.
)) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/path", "org/scalatest/path", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/prop", "org/scalatest/prop", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/suiteprop", "org/scalatest/suiteprop", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/matchers", "org/scalatest/matchers", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/time", "org/scalatest/time", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/tagobjects", "org/scalatest/tagobjects", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/verbs", "org/scalatest/verbs", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/tools", "org/scalatest/tools", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/tools/scalasbt", "org/scalatest/tools/scalasbt", targetDir, List.empty)
}
def genTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest", "org/scalatest", targetDir,
List(
"BigSuiteSuite.scala",
"CatchReporterProp.scala", // skipped because heavily depends on java reflection
"DeprecatedCatchReporterProp.scala", // skipped because heavily depends on java reflection
"ClassTaggingProp.scala", // skipped because annotation not supported
"DeprecatedClassTaggingProp.scala", // skipped because annotation not supported
"ConfigMapWrapperSuiteSpec.scala", // skipped because depends on java reflection
"DispatchReporterSpec.scala", // skipped because DispatchReporter uses thread.
"DocSpecSpec.scala", // skipped because DocSpecSpec is not supported yet
"EncodedOrderingSpec.scala", // skipped because use scala.reflect.NameTransformer.encode
"EntrySpec.scala", // skipped because Entry extends java.util.Map
"FunSuiteSuite.scala", // skipped because depends on java reflection
"InheritedTagProp.scala", // skipped because depends on java reflection
"OldDocSpec.scala", // Do we still need this?
"PrivateMethodTesterSpec.scala", // skipped because depends on java reflection
"PropertyFunSuite.scala", // skipped because depends on java reflection
"SavesConfigMapSuite.scala", // skipped because depends on java reflection
"SeveredStackTracesFailureSpec.scala", // skipped because tests failed
"SeveredStackTracesSpec.scala", // skipped because tests failed
"ShellSuite.scala", // skipped because execute is not supported for now, asmounting brackets it depends on Suite.execute, which in turns depends on StandardOutReporter, PrintReporter that depends on java classes.
"ShouldBeAnSymbolSpec.scala", // skipped because depends on java reflections
"ShouldBeASymbolSpec.scala", // skipped because depends on java reflections.
"ShouldBeSymbolSpec.scala", // skipped because depends on java reflections.
"ShouldFileBePropertyMatcherSpec.scala", // skipped because depends on java.io.File
"ShouldLogicalMatcherExprSpec.scala", // skipped because depends on mockito
"ShouldNotTypeCheckSpec.scala", // skipped because tests failed
"ShouldSameInstanceAsSpec.scala", // skipped because identical string in js env is always the same instance.
"RefSpecSpec.scala", // skipped because depends on java reflections.
"SpecSpec.scala", // skipped because depends on java reflections.
"StatusProp.scala", // skipped because uses VirtualMachineError
"DeprecatedStatusProp.scala", // skipped because uses VirtualMachineError
"StreamlinedXmlEqualitySpec.scala", // skipped because use scala.xml
"StreamlinedXmlNormMethodsSpec.scala", // skipped because use scala.xml
"StreamlinedXmlSpec.scala", // skipped because use scala.xml
"SuiteSuite.scala", // skipped because it depends on java reflection
"MatchersSerializableSpec.scala", // skipped because testing java serialization
"SeveredStackTracesSpec.scala", // skipped because stack trace isn't really helpful after linked in different js env like node.
"SeveredStackTracesFailureSpec.scala" // skipped because stack trace isn't really helpful after linked in different js env like node.
)
) ++
copyDirJS("dotty/scalatest-test/src/test/scala/org/scalatest", "org/scalatest", targetDir, List.empty) ++
copyDirJS("dotty/scalatest-test/src/test/scala/org/scalatest/matchers/should", "org/scalatest/matchers/should", targetDir, List.empty) ++
copyDirJS("dotty/scalatest-test/src/test/scala/org/scalatest/matchers/must", "org/scalatest/matchers/must", targetDir, List.empty) ++
copyDir("jvm/scalatest-test/src/test/scala/org/scalatest/expectations", "org/scalatest/expectations", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/concurrent", "org/scalatest/concurrent", targetDir,
List(
"WaitersSpec.scala", // skipped because Waiters not supported.
"AsyncAssertionsSpec.scala", // skipped because AsyncAssertions (deprecated name for Waiters) not supported.
"ConductorFixtureSuite.scala", // skipped because Conductors not supported.
"ConductorMethodsSuite.scala", // skipped because Conductors not supported.
"ConductorSuite.scala", // skipped because Conductors not supported.
"ConductorFixtureDeprecatedSuite.scala", // skipped because Conductors not supported.
"ConductorMethodsDeprecatedSuite.scala", // skipped because Conductors not supported.
"ConductorDeprecatedSuite.scala", // skipped because Conductors not supported.
"EventuallySpec.scala", // skipped because Eventually not supported.
"IntegrationPatienceSpec.scala", // skipped because depends on Eventually
"DeprecatedIntegrationPatienceSpec.scala",
"JavaFuturesSpec.scala", // skipped because depends on java futures
"TestThreadsStartingCounterSpec.scala", // skipped because depends on Conductors
"DeprecatedTimeLimitedTestsSpec.scala", // skipped because DeprecatedTimeLimitedTests not supported.
"TimeoutsSpec.scala", // skipped because Timeouts not supported.
"UltimatelySpec.scala" // skipped because Eventually not supported.
)
) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/enablers", "org/scalatest/enablers", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/events/examples", "org/scalatest/events/examples", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/events", "org/scalatest/events", targetDir,
List(
"TestLocationJUnit3Suite.scala", // Not supported by scala-js
"TestLocationJUnitSuite.scala", // Not supported by scala-js
"TestLocationTestNGSuite.scala", // Not supported by scala-js
"TestLocationMethodJUnit3Suite.scala", // Not supported by scala-js
"TestLocationMethodJUnitSuite.scala", // Not supported by scala-js
"TestLocationMethodTestNGSuite.scala", // Not supported by scala-js
"LocationMethodSuiteProp.scala", // Not supported by scala-js
)
) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/exceptions", "org/scalatest/exceptions", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/fixture", "org/scalatest/fixture", targetDir,
List(
"NoArgSpec.scala", // skipped because tests failed.
"SuiteSpec.scala" // skipped because depends on java reflections
)) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/path", "org/scalatest/path", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/prop", "org/scalatest/prop", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/suiteprop", "org/scalatest/suiteprop", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/matchers", "org/scalatest/matchers", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/time", "org/scalatest/time", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/tagobjects", "org/scalatest/tagobjects", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/verbs", "org/scalatest/verbs", targetDir, List.empty) ++
copyDirJS("jvm/scalatest-test/src/test/scala/org/scalatest/tools", "org/scalatest/tools", targetDir,
List(
"DashboardReporterSpec.scala",
"DiscoverySuiteSuite.scala",
"FilterReporterSpec.scala",
"FrameworkSuite.scala",
"HtmlReporterSpec.scala",
"JUnitXmlReporterSuite.scala",
"MemoryReporterSuite.scala",
"RunnerSpec.scala",
"SbtCommandParserSpec.scala",
"ScalaTestAntTaskSpec.scala",
"ScalaTestFrameworkSuite.scala",
"ScalaTestRunnerSuite.scala",
"SomeApiClass.scala",
"SomeApiClassRunner.scala",
"SomeApiSubClass.scala",
"StringReporterAlertSpec.scala",
"StringReporterSuite.scala",
"StringReporterSummarySpec.scala",
"SuiteDiscoveryHelperSuite.scala",
"XmlSocketReporterSpec.scala"
)
)
}
def genDiagramsTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/diagrams-test/src/test/scala/org/scalatest/diagrams", "org/scalatest/diagrams", targetDir, List.empty)
def genDiagramsTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/diagrams-test/src/test/scala/org/scalatest/diagrams", "org/scalatest/diagrams", targetDir, List.empty)
def genFeatureSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/featurespec-test/src/test/scala/org/scalatest/featurespec", "org/scalatest/featurespec", targetDir, List.empty)
def genFeatureSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/featurespec-test/src/test/scala/org/scalatest/featurespec", "org/scalatest/featurespec", targetDir, List.empty)
def genFlatSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/flatspec-test/src/test/scala/org/scalatest/flatspec", "org/scalatest/flatspec", targetDir, List.empty)
def genFlatSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/flatspec-test/src/test/scala/org/scalatest/flatspec", "org/scalatest/flatspec", targetDir, List.empty)
def genFreeSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/freespec-test/src/test/scala/org/scalatest/freespec", "org/scalatest/freespec", targetDir, List.empty)
def genFreeSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/freespec-test/src/test/scala/org/scalatest/freespec", "org/scalatest/freespec", targetDir, List.empty)
def genFunSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/funspec-test/src/test/scala/org/scalatest/funspec", "org/scalatest/funspec", targetDir, List.empty)
def genFunSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/funspec-test/src/test/scala/org/scalatest/funspec", "org/scalatest/funspec", targetDir, List.empty)
def genFunSuiteTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/funsuite-test/src/test/scala/org/scalatest/funsuite", "org/scalatest/funsuite", targetDir, List.empty)
def genFunSuiteTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/funsuite-test/src/test/scala/org/scalatest/funsuite", "org/scalatest/funsuite", targetDir, List.empty)
def genPropSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/propspec-test/src/test/scala/org/scalatest/propspec", "org/scalatest/propspec", targetDir, List.empty)
def genPropSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/propspec-test/src/test/scala/org/scalatest/propspec", "org/scalatest/propspec", targetDir, List.empty)
def genWordSpecTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/wordspec-test/src/test/scala/org/scalatest/wordspec", "org/scalatest/wordspec", targetDir, List.empty)
def genWordSpecTestJS(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDirJS("jvm/wordspec-test/src/test/scala/org/scalatest/wordspec", "org/scalatest/wordspec", targetDir, List.empty)
}
| scalatest/scalatest | project/GenScalaTestDotty.scala | Scala | apache-2.0 | 31,477 |
package graffiti.ioc
import scala.reflect.ClassTag
/**
* @author Alexander De Leon <[email protected]>
*/
trait Injector {
def getInstance[T: ClassTag]: T
def getInstance[T: ClassTag](name: String): T
def getInstances[T: ClassTag]: Seq[T]
}
| devialab/graffiti | core/src/main/scala/graffiti/ioc/Injector.scala | Scala | mit | 252 |
package com.meetup.iap
import com.meetup.iap.receipt.Subscription
import org.slf4j.LoggerFactory
import java.io.File
import scala.io.Source
import org.json4s.DefaultFormats
import org.json4s.native.Serialization.{read, writePretty}
import org.apache.commons.io.FileUtils
/**
 * Saves the existing biller data to a temporary JSON file so it can be cached across
 * requests, and reads cached subscriptions and plans back from disk.
*/
object BillerCache {
val log = LoggerFactory.getLogger(BillerCache.getClass)
implicit val formats = DefaultFormats
private val ProjectName = "iap-service"
private val inProject = new File(".").getCanonicalPath.endsWith(ProjectName)
private val Folder = {
val base = if(inProject) "" else "iap-service/"
new File(s"${base}tmp/")
}
if(!Folder.exists) {
Folder.mkdirs
}
private val TempFile = new File(Folder, "subscriptions.json")
if(!TempFile.exists) {
TempFile.createNewFile
}
private val PlansFile = new File(Folder, "plans.json")
if (!PlansFile.exists) {
PlansFile.createNewFile
}
def readFromCache(): Map[String, Subscription] = {
log.info("Reading from file: " + TempFile.getAbsolutePath)
val raw = Source.fromFile(TempFile).mkString.trim
if(raw.nonEmpty) {
Map(read[Map[String, Subscription]](raw).toSeq: _*)
} else Map.empty
}
def writeToCache(subs: Map[String, Subscription]) {
val json = writePretty(subs)
FileUtils.writeStringToFile(TempFile, json, "UTF-8")
}
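  // Illustrative round-trip (the receipt id and subscription value are hypothetical):
  //
  //   val subs = BillerCache.readFromCache()
  //   BillerCache.writeToCache(subs + ("receipt-1" -> updatedSubscription))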
def readPlansFromFile(): List[Plan] = {
log.info(s"Reading from plans file: ${PlansFile.getAbsolutePath}")
val raw = Source.fromFile(PlansFile).mkString.trim
if(raw.nonEmpty) {
log.info("Found some plans")
List(read[List[Plan]](raw).toSeq: _*)
} else List.empty
}
}
| meetup/apple-of-my-iap | iap-service/src/main/scala/com/meetup/iap/BillerCache.scala | Scala | mit | 1,714 |
/*§
===========================================================================
Chronos
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.chronos.ast.statements
import info.gianlucacosta.chronos.ast.{AstVisitor, Expression, Statement}
case class Println(expression: Option[Expression], lineNumber: Int) extends Statement {
override def accept[T](visitor: AstVisitor[T]): T =
visitor.visit(this)
}
 | giancosta86/Chronos | src/main/scala/info/gianlucacosta/chronos/ast/statements/Println.scala | Scala | apache-2.0 | 1,208 |
/*
* Copyright 2013 Stephan Rehfeld
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scaladelray.ui.model
import scaladelray.material.{SingleColorMaterial, Material}
import scaladelray.Color
import javax.swing.table.TableModel
import javax.swing.event.TableModelListener
class SingleColorMaterialProvider extends MaterialProvider with TableModel {
var color = Color( 1, 1, 1 )
override def createMaterial( l : () => Unit ): Material = {
l()
SingleColorMaterial( color )
}
override def remove(obj: AnyRef) {}
override def getRowCount: Int = 1
override def getColumnCount: Int = 2
override def getColumnName( column : Int): String = column match {
case 0 => "Property"
case 1 => "Value"
}
override def getColumnClass(row: Int): Class[_] = classOf[String]
override def isCellEditable(row: Int, column: Int): Boolean = column match {
case 0 => false
case 1 => true
}
override def getValueAt(row: Int, column: Int): AnyRef = column match {
case 0 =>
row match {
case 0 =>
"Color"
}
case 1 =>
row match {
case 0 =>
color
}
}
override def setValueAt(obj: Any, row: Int, column: Int) {
try {
row match {
case 0 =>
color = obj.asInstanceOf[Color]
}
} catch {
case _ : Throwable =>
}
}
override def addTableModelListener(p1: TableModelListener) {}
override def removeTableModelListener(p1: TableModelListener) {}
override def isReady: Boolean = true
override def toString: String = "Single color material"
override def count = 1
}
| stephan-rehfeld/scaladelray | src/main/scala/scaladelray/ui/model/SingleColorMaterialProvider.scala | Scala | apache-2.0 | 2,141 |
package wordCount2
import akka.actor.Actor
import akka.routing.RoundRobinPool
import akka.actor.Props
import akka.actor.ActorLogging
object MapActor {
val STOP_WORDS_LIST = Set("a", "am", "an", "and", "are", "as", "at",
"be", "do", "go", "if", "in", "is", "it", "of", "on", "the", "to")
def routerProps() = RoundRobinPool(5).props(props)
def props() = Props[MapActor]
case class Process(id: Int, line: String)
case class MapResult(id: Int, data: Seq[(String, Int)])
}
class MapActor extends Actor with ActorLogging {
import MapActor._
def receive: Receive = {
case Process(id, line) =>
log.debug("processing line for job {}, {}",id,line)
sender ! MapResult(id, processExpression(line))
}
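  /** Splits a line on whitespace, drops stop words (compared case-insensitively) and pairs each
    * surviving word with a count of 1. For example, processExpression("the Quick fox") yields
    * Seq(("Quick", 1), ("fox", 1)); surviving words keep their original case.
    */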
def processExpression(line: String): Seq[(String, Int)] = {
line.split("\\s+").
filter(w => !STOP_WORDS_LIST.contains(w.toLowerCase)).
map(w => (w, 1))
}
}
 | fabiofumarola/akka-tutorial | src/main/scala/wordCount2/MapActor.scala | Scala | cc0-1.0 | 911 |
package io.finch
import java.util.UUID
class DecodePathSpec extends FinchSpec {
checkAll("DecodePath[Int]", DecodePathLaws[Int].all)
checkAll("DecodePath[Long]", DecodePathLaws[Long].all)
checkAll("DecodePath[Boolean]", DecodePathLaws[Boolean].all)
checkAll("DecodePath[UUID]", DecodePathLaws[UUID].all)
}
| yanana/finch | core/src/test/scala/io/finch/DecodePathSpec.scala | Scala | apache-2.0 | 316 |
package kea
package instances
import cats.Semigroup
import com.typesafe.config.Config
import kea.types._
trait ConfigInstances {
  /** Implicit converter from a Typesafe `Config` to `Conf`. */
implicit def toConf(config: Config): Conf = Conf(config)
/**
* Define how to combine validation results.
*/
implicit val semiGroup = new Semigroup[ThrowableNel] {
override def combine(x: ThrowableNel, y: ThrowableNel) = x.concatNel(y)
}
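  // Illustrative: combining two failed validations concatenates their error lists, e.g. assuming
  // ThrowableNel is a cats NonEmptyList[Throwable]:
  //   semiGroup.combine(NonEmptyList.one(e1), NonEmptyList.one(e2)) == NonEmptyList.of(e1, e2)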
/** Read a sub-config. */
implicit val subConfigReader: ConfigReader[Config] = (c: Config, p: String) =>
validated(c.getConfig(p))
}
object ConfigInstances extends ConfigInstances
 | lewismj/kea | core/src/main/scala/kea/instances/config.scala | Scala | bsd-2-clause | 649 |
package sample.blog.post
import akka.actor.{ ActorSystem, Props }
import akka.testkit._
import com.typesafe.config.Config
import demesne._
import demesne.testkit.AggregateRootSpec
import demesne.testkit.concurrent.CountDownFunction
import org.scalatest.Tag
import omnibus.akka.envelope._
import omnibus.akka.publish.ReliablePublisher.ReliableMessage
import scala.concurrent.duration._
import org.scalatest.concurrent.ScalaFutures
import sample.blog.author.AuthorListingModule
import sample.blog.post.{ PostPrototol => P }
/**
* Created by damonrolfs on 9/18/14.
*/
class PostModuleSpec extends AggregateRootSpec[Post, Post#ID] with ScalaFutures {
override type State = Post
override type ID = PostModule.ID
override type Protocol = PostPrototol.type
override val protocol: Protocol = PostPrototol
override def createAkkaFixture(
test: OneArgTest,
system: ActorSystem,
slug: String
): PostFixture = {
new PostFixture( slug, system )
}
override type Fixture = PostFixture
class PostFixture( _slug: String, _system: ActorSystem )
extends AggregateFixture( _slug, _system ) {
override val module: AggregateRootModule[State, ID] = PostModule
val author: TestProbe = TestProbe()
override def nextId(): TID = Post.identifying.next
object TestPostRootType extends PostModule.PostType {
override def repositoryProps( implicit model: DomainModel ): Props =
PostModule.Repository.localProps( model )
}
override val rootTypes: Set[AggregateRootType] = Set( TestPostRootType )
// override def resources: Map[Symbol, Any] = AuthorListingModule resources system
override def resources: Map[Symbol, Any] = {
val makeAuthorListing = () => author.ref
Map( AuthorListingModule.ResourceKey -> makeAuthorListing )
}
// override def startTasks( system: ActorSystem ): Set[StartTask] = {
//// Set( StartTask( AuthorListingModule.startTask(system), "AuthorListing" ) )
// }
// override def context: Map[Symbol, Any] = trace.block( "context" ) {
// val result = super.context
// val makeAuthorListing = () => trace.block( "makeAuthorList" ){ author.ref }
// result + ( 'authorListing -> makeAuthorListing )
// }
}
object GOOD extends Tag( "good" )
"Post Module should" should {
// "config is okay" taggedAs(WIP) in { f: Fixture =>
// val config = f.system.settings.config
// config.getString( "akka.persistence.journal.plugin" ) mustBe "inmemory-journal"
// config.getString( "akka.persistence.snapshot-store.plugin" ) mustBe "inmemory-snapshot-store"
// }
"add content" in { fixture: Fixture =>
import fixture._
system.eventStream.subscribe( bus.ref, classOf[ReliableMessage] )
system.eventStream.subscribe( bus.ref, classOf[PostPrototol.Event] )
val id = nextId()
val content = PostContent(
author = "Damon",
title = "Add Content",
body = "add body content"
)
val post = PostModule aggregateOf id
post !+ P.AddPost( id, content )
bus.expectMsgPF( max = 3000.millis.dilated, hint = "post added" ) { //DMR: Is this sensitive to total num of tests executed?
case payload: P.PostAdded => payload.content mustBe content
}
}
"not respond before added" in { fixture: Fixture =>
import fixture._
system.eventStream.subscribe( bus.ref, classOf[ReliableMessage] )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
val id = nextId()
val post = PostModule aggregateOf id
post !+ P.ChangeBody( id, "dummy content" )
post !+ P.Publish( id )
bus.expectNoMessage( 200.millis.dilated )
}
"not respond to incomplete content" in { fixture: Fixture =>
import fixture._
system.eventStream.subscribe( bus.ref, classOf[ReliableMessage] )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
val id = nextId()
val post = PostModule aggregateOf id
post !+ P.AddPost( id, PostContent( author = "Damon", title = "", body = "no title" ) )
bus.expectNoMessage( 200.millis.dilated )
post !+ P.AddPost(
id,
PostContent( author = "", title = "Incomplete Content", body = "no author" )
)
bus.expectNoMessage( 200.millis.dilated )
}
"have empty contents before use" taggedAs WIP in { fixture: Fixture =>
import fixture._
val id = nextId()
val post = PostModule aggregateOf id
post.sendEnvelope( P.GetContent( id ) )( author.ref )
author.expectMsgPF( max = 200.millis.dilated, hint = "empty contents" ) {
case Envelope( payload: PostContent, h ) => {
payload mustBe PostContent( "", "", "" )
h.messageNumber mustBe MessageNumber( 2 )
h.workId must not be WorkId.unknown
}
}
}
"have contents after posting" in { fixture: Fixture =>
import fixture._
val id = nextId()
val post = PostModule aggregateOf id
val content = PostContent( author = "Damon", title = "Contents", body = "initial contents" )
val clientProbe = TestProbe()
post !+ P.AddPost( id, content )
post.sendEnvelope( P.GetContent( id ) )( clientProbe.ref )
clientProbe.expectMsgPF( max = 400.millis.dilated, hint = "initial contents" ) {
case Envelope( payload: PostContent, _ ) if payload == content => true
}
}
"have changed contents after change" in { fixture: Fixture =>
import fixture._
val id = nextId()
val post = PostModule aggregateOf id
val content = PostContent( author = "Damon", title = "Contents", body = "initial contents" )
val updated = "updated contents"
system.eventStream.subscribe( bus.ref, classOf[ReliableMessage] )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
val clientProbe = TestProbe()
post !+ P.AddPost( id, content )
bus.expectMsgPF( hint = "PostAdded" ) {
case payload: P.PostAdded => payload.content mustBe content
}
post !+ P.ChangeBody( id, updated )
bus.expectMsgPF( hint = "BodyChanged" ) {
case payload: P.BodyChanged => payload.body mustBe updated
}
post.sendEnvelope( P.GetContent( id ) )( clientProbe.ref )
clientProbe.expectMsgPF( max = 200.millis.dilated, hint = "changed contents" ) {
case Envelope( payload: PostContent, _ ) => payload mustBe content.copy( body = updated )
}
}
"have changed contents after change and published" in { fixture: Fixture =>
import fixture._
val id = nextId()
val post = PostModule aggregateOf id
val content = PostContent( author = "Damon", title = "Contents", body = "initial contents" )
val updated = "updated contents"
val clientProbe = TestProbe()
post !+ P.AddPost( id, content )
post !+ P.ChangeBody( id, updated )
post !+ P.Publish( id )
post.sendEnvelope( P.GetContent( id ) )( clientProbe.ref )
clientProbe.expectMsgPF( max = 400.millis.dilated, hint = "changed contents" ) {
case Envelope( payload: PostContent, _ ) => payload mustBe content.copy( body = updated )
}
}
"dont change contents after published" in { fixture: Fixture =>
import fixture._
val id = nextId()
val post = PostModule aggregateOf id
val content = PostContent( author = "Damon", title = "Contents", body = "initial contents" )
val updated = "updated contents"
val clientProbe = TestProbe()
post !+ P.AddPost( id, content )
post !+ P.ChangeBody( id, updated )
post !+ P.Publish( id )
post !+ P.ChangeBody( id, "BAD CONTENT" )
post.sendEnvelope( P.GetContent( id ) )( clientProbe.ref )
clientProbe.expectMsgPF( max = 400.millis.dilated, hint = "changed contents" ) {
case Envelope( payload: PostContent, _ ) => payload mustBe content.copy( body = updated )
}
}
"follow happy path" in { fixture: Fixture =>
import fixture._
val id = nextId()
val content = PostContent( author = "Damon", title = "Test Add", body = "testing happy path" )
system.eventStream.subscribe( bus.ref, classOf[ReliableMessage] )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
PostModule.aggregateOf( id ) !+ P.AddPost( id, content )
PostModule.aggregateOf( id ) !+ P.ChangeBody( id, "new content" )
PostModule.aggregateOf( id ) !+ P.Publish( id )
bus.expectMsgPF( hint = "post-added" ) {
case payload: P.PostAdded => payload.content mustBe content
}
bus.expectMsgPF( hint = "body-changed" ) {
case payload: P.BodyChanged => payload.body mustBe "new content"
}
bus.expectMsgPF( hint = "post-published local" ) {
case P.PostPublished( pid, _, title ) => {
pid mustBe id
title mustBe "Test Add"
}
}
author.expectMsgPF( hint = "post-published reliable" ) {
case ReliableMessage( 1, Envelope( P.PostPublished( pid, _, title ), _ ) ) => {
pid mustBe id
title mustBe "Test Add"
}
}
}
"recorded in author index after post added via bus" in { fixture: Fixture =>
import fixture._
val rt = PostModule.rootType
val ar = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'author )
ar.isRight mustBe true
for {
register <- ar
} {
val id = nextId()
val content =
PostContent( author = "Damon", title = "Test Add", body = "testing author index add" )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
val post = PostModule.aggregateOf( id )
post !+ P.AddPost( id, content )
bus.expectMsgPF( hint = "post-added" ) {
case payload: P.PostAdded => payload.content mustBe content
}
val countDown = new CountDownFunction[String]
countDown await 200.millis.dilated
whenReady( register.futureGet( "Damon" ) ) { result =>
result mustBe Some( id )
}
scribe.trace( s"""index:Damon = ${register.get( "Damon" )}""" )
register.get( "Damon" ) mustBe Some( id )
}
}
"recorded in title index after post added via event stream" in { fixture: Fixture =>
import fixture._
val rt = PostModule.rootType
val ar = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'title )
ar.isRight mustBe true
for {
register <- ar
} {
val p = TestProbe()
val id = nextId()
val content =
PostContent( author = "Damon", title = "Test Add", body = "testing author index add" )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
system.eventStream.subscribe( p.ref, classOf[P.Event] )
val post = PostModule.aggregateOf( id )
post !+ P.AddPost( id, content )
bus.expectMsgPF( hint = "post-added" ) {
case payload: P.PostAdded => payload.content mustBe content
}
p.expectMsgPF( hint = "post-added stream" ) {
case payload: P.PostAdded => payload.content mustBe content
}
val countDown = new CountDownFunction[String]
countDown await 200.millis.dilated
whenReady( register.futureGet( "Test Add" ) ) { result =>
result mustBe Some( id )
}
// countDown await 75.millis.dilated
register.get( "Test Add" ) mustBe Some( id )
}
}
"withdrawn title in index after post delete via event stream" in { fixture: Fixture =>
import fixture._
val rt = PostModule.rootType
val ar = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'author )
ar.isRight mustBe true
val tr = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'title )
tr.isRight mustBe true
for {
authorRegister <- ar
titleregister <- tr
} {
val p = TestProbe()
val id = nextId()
val content =
PostContent( author = "Damon", title = "Test Add", body = "testing index add" )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
system.eventStream.subscribe( p.ref, classOf[P.Event] )
val post = PostModule.aggregateOf( id )
post !+ P.AddPost( id, content )
bus.expectMsgPF( hint = "post-added" ) {
case payload: P.PostAdded => payload.content mustBe content
}
p.expectMsgPF( hint = "post-added stream" ) {
case payload: P.PostAdded => payload.content mustBe content
}
val countDownAdd = new CountDownFunction[String]
countDownAdd await 200.millis.dilated
whenReady( titleregister.futureGet( "Test Add" ) ) { result =>
result mustBe Some( id )
}
// countDown await 75.millis.dilated
titleregister.get( "Test Add" ) mustBe Some( id )
post !+ P.Delete( id )
bus.expectMsgPF( hint = "post-deleted" ) {
case payload: P.Deleted => payload.sourceId mustBe id
}
p.expectMsgPF( hint = "post-deleted stream" ) {
case payload: P.Deleted => payload.sourceId mustBe id
}
val countDownChange = new CountDownFunction[String]
countDownChange await 200.millis.dilated
whenReady( titleregister.futureGet( "Test Add" ) ) { result =>
scribe.error( s"HERE ****: result(Test Add) = $result" )
result mustBe None
}
whenReady( authorRegister.futureGet( "Damon" ) ) { result =>
scribe.error( s"HERE ****: result(Damon) = $result" )
result mustBe None
}
}
}
"revised title in index after post title change via event stream" in { fixture: Fixture =>
import fixture._
val rt = PostModule.rootType
val ar = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'author )
ar.isRight mustBe true
val tr = model.aggregateIndexFor[String, PostModule.TID, PostModule.TID]( rt, 'title )
tr.isRight mustBe true
for {
authorRegister <- ar
titleRegister <- tr
} {
val p = TestProbe()
val id = nextId()
val content =
PostContent( author = "Damon", title = "Test Add", body = "testing index add" )
system.eventStream.subscribe( bus.ref, classOf[P.Event] )
system.eventStream.subscribe( p.ref, classOf[P.Event] )
val post = PostModule.aggregateOf( id )
post !+ P.AddPost( id, content )
bus.expectMsgPF( hint = "post-added" ) {
case payload: P.PostAdded => payload.content mustBe content
}
p.expectMsgPF( hint = "post-added stream" ) {
case payload: P.PostAdded => payload.content mustBe content
}
val countDownAdd = new CountDownFunction[String]
countDownAdd await 200.millis.dilated
whenReady( authorRegister.futureGet( "Damon" ) ) { result =>
result mustBe Some( id )
}
whenReady( titleRegister.futureGet( "Test Add" ) ) { result =>
result mustBe Some( id )
}
// countDown await 75.millis.dilated
authorRegister.get( "Damon" ) mustBe Some( id )
titleRegister.get( "Test Add" ) mustBe Some( id )
post !+ P.ChangeTitle( id, "New Title" )
bus.expectMsgPF( hint = "title-change" ) {
case payload: P.TitleChanged => {
payload.oldTitle mustBe "Test Add"
payload.newTitle mustBe "New Title"
}
}
p.expectMsgPF( hint = "post-title change stream" ) {
case payload: P.TitleChanged => {
payload.oldTitle mustBe "Test Add"
payload.newTitle mustBe "New Title"
}
}
val countDownChange = new CountDownFunction[String]
countDownChange await 200.millis.dilated
whenReady( titleRegister.futureGet( "New Title" ) ) { result =>
scribe.error( s"HERE ****: result(New Title) = $result" )
result mustBe Some( id )
}
whenReady( titleRegister.futureGet( "Test Add" ) ) { result =>
scribe.error( s"HERE ****: result(Test Add) = $result" )
result mustBe None
}
whenReady( authorRegister.futureGet( "Damon" ) ) { result =>
scribe.error( s"HERE ****: result(Damon) = $result" )
result mustBe Some( id )
}
}
}
}
}
| dmrolfs/demesne | examples/src/test/scala/blog/post/PostModuleSpec.scala | Scala | apache-2.0 | 16,567 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.mimir
import quasar.precog.common._
import quasar.yggdrasil._
import quasar.yggdrasil.bytecode._
import quasar.yggdrasil.execution.EvaluationContext
import scalaz._, Scalaz._
trait TransSpecableModule[M[+ _]] extends TransSpecModule with TableModule[M] with EvaluatorMethodsModule[M] {
import dag._
import library._
import instructions._
trait TransSpecable extends EvaluatorMethods {
import trans._
trait TransSpecableOrderFold[T] {
def WrapObject(node: Join)(parent: T, field: String): T
def DerefObjectStatic(node: Join)(parent: T, field: String): T
def DerefArrayStatic(node: Join)(parent: T, index: Int): T
def WrapArray(node: Operate)(parent: T): T
def unmatched(node: DepGraph): T
def done(node: DepGraph): T
}
trait TransSpecableFold[T] extends TransSpecableOrderFold[T] {
def EqualLiteral(node: Join)(parent: T, value: RValue, invert: Boolean): T
def DerefMetadataStatic(node: Join)(parent: T, field: String): T
def ArraySwap(node: Join)(parent: T, index: Int): T
def InnerObjectConcat(node: Join)(parent: T): T
def InnerArrayConcat(node: Join)(parent: T): T
def Map1Left(node: Join)(parent: T, op: Op2F2, graph: DepGraph, value: RValue): T
def Map1Right(node: Join)(parent: T, op: Op2F2, graph: DepGraph, value: RValue): T
def Const(node: dag.Const)(under: T): T
def binOp(node: Join)(leftParent: T, rightParent: => T, op: BinaryOperation): T
def Filter(node: dag.Filter)(leftParent: T, rightParent: => T): T
def Op1(node: Operate)(parent: T, op: UnaryOperation): T
def Cond(node: dag.Cond)(pred: T, left: T, right: T): T
}
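    /**
     * Descriptive note (added): returns true only when `to` can be reached from
     * `from` using the single-source transformations recognized below; any node
     * that falls through to `unmatched` makes the whole fold false.
     */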
def isTransSpecable(to: DepGraph, from: DepGraph): Boolean =
foldDownTransSpecable(to, Some(from))(new TransSpecableFold[Boolean] {
def EqualLiteral(node: Join)(parent: Boolean, value: RValue, invert: Boolean) = parent
def WrapObject(node: Join)(parent: Boolean, field: String) = parent
def DerefObjectStatic(node: Join)(parent: Boolean, field: String) = parent
def DerefMetadataStatic(node: Join)(parent: Boolean, field: String) = parent
def DerefArrayStatic(node: Join)(parent: Boolean, index: Int) = parent
def ArraySwap(node: Join)(parent: Boolean, index: Int) = parent
def InnerObjectConcat(node: Join)(parent: Boolean) = parent
def InnerArrayConcat(node: Join)(parent: Boolean) = parent
def Map1Left(node: Join)(parent: Boolean, op: Op2F2, graph: DepGraph, value: RValue) = parent
def Map1Right(node: Join)(parent: Boolean, op: Op2F2, graph: DepGraph, value: RValue) = parent
def binOp(node: Join)(leftParent: Boolean, rightParent: => Boolean, op: BinaryOperation) = leftParent && rightParent
def Filter(node: dag.Filter)(leftParent: Boolean, rightParent: => Boolean) = leftParent && rightParent
def WrapArray(node: Operate)(parent: Boolean) = parent
def Op1(node: Operate)(parent: Boolean, op: UnaryOperation) = parent
def Cond(node: dag.Cond)(pred: Boolean, left: Boolean, right: Boolean) = pred && left && right
def Const(node: dag.Const)(under: Boolean) = under
def unmatched(node: DepGraph) = false
def done(node: DepGraph) = true
})
private[this] def snd[A, B](a: A, b: B): Option[B] = Some(b)
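    /**
     * Descriptive note (added): builds a TransSpec1 describing how `to` is derived
     * from an ancestor; the walk stops at `from` or at the first node it cannot
     * express as a transspec.
     */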
def mkTransSpec(to: DepGraph, from: DepGraph, ctx: EvaluationContext): Option[TransSpec1] =
mkTransSpecWithState[Option, (TransSpec1, DepGraph)](to, Some(from), ctx, identity, snd, some).map(_._1)
def findAncestor(to: DepGraph, ctx: EvaluationContext): Option[DepGraph] =
mkTransSpecWithState[Option, (TransSpec1, DepGraph)](to, None, ctx, identity, snd, some).map(_._2)
def findTransSpecAndAncestor(to: DepGraph, ctx: EvaluationContext): Option[(TransSpec1, DepGraph)] =
mkTransSpecWithState[Option, (TransSpec1, DepGraph)](to, None, ctx, identity, snd, some)
def findOrderAncestor(to: DepGraph, ctx: EvaluationContext): Option[DepGraph] =
mkTransSpecOrderWithState[Option, (TransSpec1, DepGraph)](to, None, ctx, identity, snd, some).map(_._2)
def transFold[N[_]: Monad, S](to: DepGraph,
from: Option[DepGraph],
ctx: EvaluationContext,
get: S => (TransSpec1, DepGraph),
set: (S, (TransSpec1, DepGraph)) => N[S],
init: ((TransSpec1, DepGraph)) => N[S]) = {
// Bifunctor leftMap would be better here if it existed in pimped type inferrable form
def leftMap(parent: S)(f: TransSpec1 => TransSpec1) = get(parent) match {
case (spec, ancestor) => set(parent, (f(spec), ancestor))
}
new TransSpecableFold[N[S]] {
import trans._
def EqualLiteral(node: Join)(parent: N[S], value: RValue, invert: Boolean) =
parent.flatMap(leftMap(_) { target =>
val inner = trans.Equal(target, trans.transRValue(value, target))
if (invert) op1ForUnOp(Comp).spec(inner) else inner
})
def WrapObject(node: Join)(parent: N[S], field: String) =
parent.flatMap(leftMap(_)(trans.WrapObject(_, field)))
def DerefObjectStatic(node: Join)(parent: N[S], field: String) =
parent.flatMap(leftMap(_)(trans.DerefObjectStatic(_, CPathField(field))))
def DerefMetadataStatic(node: Join)(parent: N[S], field: String) =
parent.flatMap(leftMap(_)(trans.DerefMetadataStatic(_, CPathMeta(field))))
def DerefArrayStatic(node: Join)(parent: N[S], index: Int) =
parent.flatMap(leftMap(_)(trans.DerefArrayStatic(_, CPathIndex(index))))
def ArraySwap(node: Join)(parent: N[S], index: Int) = {
parent.flatMap(leftMap(_)(trans.ArraySwap(_, index)))
}
def InnerObjectConcat(node: Join)(parent: N[S]) =
parent.flatMap(leftMap(_)(trans.InnerObjectConcat(_)))
def InnerArrayConcat(node: Join)(parent: N[S]) =
parent.flatMap(leftMap(_)(trans.InnerArrayConcat(_)))
def Map1Left(node: Join)(parent: N[S], op: Op2F2, graph: DepGraph, value: RValue) =
parent.flatMap(leftMap(_) { target =>
value match {
case cv: CValue =>
trans.Map1(target, op.f2.applyr(cv))
case _ =>
trans.Typed(trans.Typed(target, JNullT), JTextT) // nuke all the things
}
})
def Map1Right(node: Join)(parent: N[S], op: Op2F2, graph: DepGraph, value: RValue) =
parent.flatMap(leftMap(_) { target =>
value match {
case cv: CValue =>
trans.Map1(target, op.f2.applyl(cv))
case _ =>
trans.Typed(trans.Typed(target, JNullT), JTextT) // nuke all the things
}
})
def binOp(node: Join)(leftParent: N[S], rightParent: => N[S], op: BinaryOperation) = {
for {
pl <- leftParent
(l, al) = get(pl)
pr <- rightParent
(r, ar) = get(pr)
result <- if (al == ar) {
set(pl, (transFromBinOp(op)(l, r), al))
} else {
init(Leaf(Source), node)
}
} yield result
}
def Filter(node: dag.Filter)(leftParent: N[S], rightParent: => N[S]) =
for {
pl <- leftParent
(l, al) = get(pl)
pr <- rightParent
(r, ar) = get(pr)
result <- if (al == ar) set(pl, (trans.Filter(l, r), al)) else init(Leaf(Source), node)
} yield result
def WrapArray(node: Operate)(parent: N[S]) =
parent.flatMap(leftMap(_)(trans.WrapArray(_)))
def Op1(node: Operate)(parent: N[S], op: UnaryOperation) =
parent.flatMap(leftMap(_)(parent => op1ForUnOp(op).spec(parent)))
def Cond(node: dag.Cond)(pred: N[S], left: N[S], right: N[S]) = {
for {
pp <- pred
(p, ap) = get(pp)
pl <- left
(l, al) = get(pl)
pr <- right
(r, ar) = get(pr)
result <- if (ap == al && al == ar)
set(pp, (trans.Cond(p, l, r), ap))
else
init(Leaf(Source), node)
} yield result
}
def Const(node: dag.Const)(underN: N[S]) = {
val dag.Const(cv: CValue) = node // TODO !!
underN flatMap { under =>
leftMap(under) { spec =>
trans.ConstLiteral(cv, spec)
}
}
}
def unmatched(node: DepGraph) = init(Leaf(Source), node)
def done(node: DepGraph) = init(Leaf(Source), node)
}
}
def mkTransSpecWithState[N[_]: Monad, S](to: DepGraph,
from: Option[DepGraph],
ctx: EvaluationContext,
get: S => (TransSpec1, DepGraph),
set: (S, (TransSpec1, DepGraph)) => N[S],
init: ((TransSpec1, DepGraph)) => N[S]): N[S] = {
foldDownTransSpecable(to, from)(transFold[N, S](to, from, ctx, get, set, init))
}
def mkTransSpecOrderWithState[N[+ _]: Monad, S](to: DepGraph,
from: Option[DepGraph],
ctx: EvaluationContext,
get: S => (TransSpec1, DepGraph),
set: (S, (TransSpec1, DepGraph)) => N[S],
init: ((TransSpec1, DepGraph)) => N[S]): N[S] = {
foldDownTransSpecableOrder(to, from)(transFold[N, S](to, from, ctx, get, set, init))
}
object ConstInt {
def unapply(c: Const) = c match {
case Const(CNum(n)) => Some(n.toInt)
case Const(CLong(n)) => Some(n.toInt)
case Const(CDouble(n)) => Some(n.toInt)
case _ => None
}
}
object Op2F2ForBinOp {
def unapply(op: BinaryOperation): Option[Op2F2] = op2ForBinOp(op).flatMap {
case op2f2: Op2F2 => Some(op2f2)
case _ => None
}
}
def foldDownTransSpecableOrder[T](to: DepGraph, from: Option[DepGraph])(alg: TransSpecableOrderFold[T]): T = {
def loop(graph: DepGraph): T = graph match {
case node if from.map(_ == node).getOrElse(false) => alg.done(node)
case node @ Join(instructions.WrapObject, Cross(_), Const(CString(field)), right) =>
alg.WrapObject(node)(loop(right), field)
case node @ Join(DerefObject, Cross(_), left, Const(CString(field))) =>
alg.DerefObjectStatic(node)(loop(left), field)
case node @ Join(DerefArray, Cross(_), left, ConstInt(index)) =>
alg.DerefArrayStatic(node)(loop(left), index)
case node @ Operate(instructions.WrapArray, parent) =>
alg.WrapArray(node)(loop(parent))
case node => alg.unmatched(node)
}
loop(to)
}
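    /**
     * Descriptive note (added): walks the DepGraph from `to` towards `from`,
     * dispatching each recognized node shape (static derefs, wraps, literal maps,
     * filters, conds, ...) to the corresponding case of the supplied algebra; any
     * other shape falls through to `unmatched`.
     */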
def foldDownTransSpecable[T](to: DepGraph, from: Option[DepGraph])(alg: TransSpecableFold[T]): T = {
def loop(graph: DepGraph): T = graph match {
case node if from.map(_ == node).getOrElse(false) => alg.done(node)
case node @ dag.Cond(pred, left @ dag.Const(_: CValue), Cross(_), right, IdentitySort | ValueSort(_)) => {
val predRes = loop(pred)
alg.Cond(node)(predRes, alg.Const(left)(predRes), loop(right))
}
case node @ dag.Cond(pred, left, IdentitySort | ValueSort(_), right @ dag.Const(_: CValue), Cross(_)) => {
val predRes = loop(pred)
alg.Cond(node)(predRes, loop(left), alg.Const(right)(predRes))
}
case node @ dag.Cond(pred, left @ dag.Const(_: CValue), Cross(_), right @ dag.Const(_: CValue), Cross(_)) => {
val predRes = loop(pred)
alg.Cond(node)(predRes, alg.Const(left)(predRes), alg.Const(right)(predRes))
}
case node @ dag.Cond(pred, left, IdentitySort | ValueSort(_), right, IdentitySort | ValueSort(_)) =>
alg.Cond(node)(loop(pred), loop(left), loop(right))
case node @ Join(Eq, Cross(_), left, Const(value)) =>
alg.EqualLiteral(node)(loop(left), value, false)
case node @ Join(Eq, Cross(_), Const(value), right) =>
alg.EqualLiteral(node)(loop(right), value, false)
case node @ Join(NotEq, Cross(_), left, Const(value)) =>
alg.EqualLiteral(node)(loop(left), value, true)
case node @ Join(NotEq, Cross(_), Const(value), right) =>
alg.EqualLiteral(node)(loop(right), value, true)
case node @ Join(instructions.WrapObject, Cross(_), Const(CString(field)), right) =>
alg.WrapObject(node)(loop(right), field)
case node @ Join(DerefObject, Cross(_), left, Const(CString(field))) =>
alg.DerefObjectStatic(node)(loop(left), field)
case node @ Join(DerefMetadata, Cross(_), left, Const(CString(field))) =>
alg.DerefMetadataStatic(node)(loop(left), field)
case node @ Join(DerefArray, Cross(_), left, ConstInt(index)) =>
alg.DerefArrayStatic(node)(loop(left), index)
case node @ Join(instructions.ArraySwap, Cross(_), left, ConstInt(index)) =>
alg.ArraySwap(node)(loop(left), index)
case node @ Join(JoinObject, Cross(_), left, Const(RObject.empty)) =>
alg.InnerObjectConcat(node)(loop(left))
case node @ Join(JoinObject, Cross(_), Const(RObject.empty), right) =>
alg.InnerObjectConcat(node)(loop(right))
case node @ Join(JoinArray, Cross(_), left, Const(RArray.empty)) =>
alg.InnerArrayConcat(node)(loop(left))
case node @ Join(JoinArray, Cross(_), Const(RArray.empty), right) =>
alg.InnerArrayConcat(node)(loop(right))
case node @ Join(Op2F2ForBinOp(op), Cross(_), left, Const(value)) =>
alg.Map1Left(node)(loop(left), op, left, value)
case node @ Join(Op2F2ForBinOp(op), Cross(_), Const(value), right) =>
alg.Map1Right(node)(loop(right), op, right, value)
case node @ Join(op, joinSort @ (IdentitySort | ValueSort(_)), left, right) =>
alg.binOp(node)(loop(left), loop(right), op)
case node @ dag.Filter(joinSort @ (IdentitySort | ValueSort(_)), left, right) =>
alg.Filter(node)(loop(left), loop(right))
case node @ Operate(instructions.WrapArray, parent) =>
alg.WrapArray(node)(loop(parent))
case node @ Operate(op, parent) =>
alg.Op1(node)(loop(parent), op)
case node => alg.unmatched(node)
}
loop(to)
}
}
}
| drostron/quasar | mimir/src/main/scala/quasar/mimir/TransSpecable.scala | Scala | apache-2.0 | 16,081 |
package io.buoyant.telemetry.influxdb
import com.fasterxml.jackson.annotation.JsonIgnore
import com.twitter.finagle.Stack
import io.buoyant.telemetry.{MetricsTree, Telemeter, TelemeterConfig, TelemeterInitializer}
class InfluxDbTelemeterInitializer extends TelemeterInitializer {
type Config = InfluxDbConfig
val configClass = classOf[InfluxDbConfig]
override val configId = "io.l5d.influxdb"
}
object InfluxDbTelemeterInitializer extends InfluxDbTelemeterInitializer
class InfluxDbConfig extends TelemeterConfig {
@JsonIgnore def mk(params: Stack.Params): Telemeter = new InfluxDbTelemeter(params[MetricsTree])
}
| denverwilliams/linkerd | telemetry/influxdb/src/main/scala/io/buoyant/telemetry/influxdb/InfluxDbTelemeterInitializer.scala | Scala | apache-2.0 | 626 |
package suql.parser
import suql.testing.UnitSpec
import scala.collection.immutable.HashMap
class ParserErrorTestCasesSpec extends UnitSpec {
val parser = new SuqlPackratParser
val errorTestCases = HashMap(
"\\"hello world" -> "unterminated string literal"
)
for ((input, errorDescription) <- errorTestCases) {
it should s"fail to parse $errorDescription" in {
parser.___parse(input).isLeft shouldBe true
}
}
}
| rmuch/suql | src/test/scala/suql/parser/ParserErrorTestCasesSpec.scala | Scala | mit | 442 |
package com.catinthedark.lib
/**
* Created by over on 13.12.14.
*/
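/**
 * Descriptive note (added): a no-op placeholder state; activation and exit do
 * nothing, and each run tick yields no transition.
 */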
abstract class Stub(name: String) extends YieldUnit[Unit] {
override def toString = name
override def onActivate(data: Any) = {}
override def onExit() = {}
override def run(delay: Float): (Option[Unit], Any) = (None, null)
}
| cat-in-the-dark/old48_35_game | src/main/scala/com/catinthedark/lib/Stub.scala | Scala | mit | 307 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.xml.parsers.dom
import java.io.ByteArrayInputStream
import javax.xml.parsers.DocumentBuilderFactory
import javax.xml.stream.XMLInputFactory
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types._
import org.apache.spark.sql.xml.util.InferSchema
import org.w3c.dom.Node
import scala.collection.Seq
import scala.collection.mutable.ArrayBuffer
/**
 * Wraps the parser for the iteration process.
*/
private[sql] object DomXmlPartialSchemaParser {
def apply(xml: RDD[String],
samplingRatio: Double,
parseMode: String,
excludeAttributeFlag: Boolean,
treatEmptyValuesAsNulls: Boolean): RDD[DataType] = {
require(samplingRatio > 0, s"samplingRatio ($samplingRatio) should be greater than 0")
val schemaData = if (samplingRatio > 0.99) {
xml
} else {
xml.sample(withReplacement = false, samplingRatio, 1)
}
schemaData.mapPartitions { iter =>
iter.flatMap { xml =>
val builder = DocumentBuilderFactory.newInstance().newDocumentBuilder()
        // It does not have to skip whitespace, since [[XmlInputFormat]]
        // always finds the root tag without a leading space.
val childNode = builder.parse(new ByteArrayInputStream(xml.getBytes))
.getChildNodes.item(0)
val conf = DomConfiguration(excludeAttributeFlag, treatEmptyValuesAsNulls)
val parser = new DomXmlParser(childNode, conf)
if (parser.isEmpty) {
None
} else {
Some(inferObject(parser))
}
}
}
}
/**
   * Infer the type of an XML document from the parser's token stream
*/
private def inferField(parser: DomXmlParser,
node: Node,
conf: DomConfiguration): DataType = {
inferField(parser.inferDataType(node), parser, node, conf)
}
  private def inferArrayElementField(parser: DomXmlParser,
                                     node: Node,
                                     conf: DomConfiguration): DataType = {
inferField(parser.inferArrayElementType(node), parser, node, conf)
}
private def inferField(dataType: Int,
parser: DomXmlParser,
node: Node,
conf: DomConfiguration): DataType = {
import org.apache.spark.sql.xml.parsers.dom.DomXmlParser._
dataType match {
case LONG =>
LongType
case DOUBLE =>
DoubleType
case BOOLEAN =>
BooleanType
case STRING =>
StringType
case NULL =>
NullType
case OBJECT =>
inferObject(new DomXmlParser(node, conf))
case ARRAY =>
partiallyInferArray(parser, node, conf)
case _ =>
// TODO: Now it skips unsupported types (we might have to treat null values).
StringType
}
}
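  /**
   * Descriptive note (added): builds a StructType for an element by inferring each
   * child's type and merging repeated field names into a single ArrayType via
   * InferSchema.compatibleType.
   */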
def inferObject(parser: DomXmlParser): DataType = {
val builder = Seq.newBuilder[StructField]
val partialInferredArrayTypes = collection.mutable.Map[String, ArrayBuffer[DataType]]()
parser.foreach{ node =>
val field = node.getNodeName
val inferredType = inferField(parser, node, parser.getConf)
inferredType match {
        // For XML, an element can contain repeated keys,
        // so we need to manually merge them into an array.
case ArrayType(st, _) =>
val dataTypes = partialInferredArrayTypes.getOrElse(field, ArrayBuffer.empty[DataType])
dataTypes += st
partialInferredArrayTypes += (field -> dataTypes)
case _ =>
builder += StructField(field, inferField(parser, node, parser.getConf), nullable = true)
}
}
    // We need to manually merge all the [[ArrayType]]s.
partialInferredArrayTypes.foreach{
case (field, dataTypes) =>
val elementType = dataTypes.reduceLeft(InferSchema.compatibleType)
builder += StructField(field, ArrayType(elementType), nullable = true)
}
StructType(builder.result().sortBy(_.name))
}
def partiallyInferArray(parser: DomXmlParser, node: Node, conf: DomConfiguration): DataType = {
    ArrayType(inferArrayElementField(parser, node, conf))
}
}
| HyukjinKwon/spark-xml | src/main/scala/org/apache/spark/sql/xml/parsers/dom/DomXmlPartialSchemaParser.scala | Scala | apache-2.0 | 5,035 |
package skinny.controller.feature
import javax.servlet.ServletOutputStream
import skinny.engine.SkinnyEngineBase
import skinny.logging.LoggerProvider
/**
* Chunked Response (Transfer-Encoding: chunked).
*/
trait ChunkedResponseFeature { self: SkinnyEngineBase with LoggerProvider =>
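  // Illustrative (hypothetical) usage from a controller action; the payload
  // strings below are placeholders, not part of this trait:
  //   withOutputStream { implicit stream =>
  //     writeChunk("chunk-1".getBytes)
  //     writeChunk("chunk-2".getBytes)
  //   }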
def withOutputStream(f: ServletOutputStream => Unit): Unit = {
val stream = response.getOutputStream
try {
f.apply(stream)
} catch {
case e: Exception if Option(e.getCause).isDefined =>
val message = Option(e.getMessage).getOrElse(Option(e.getCause).map(_.getMessage).getOrElse(""))
if (logger.isDebugEnabled) {
logger.debug(s"Chunked response error (message: ${message})", e)
} else {
logger.info(s"Chunked response error (message: ${message})")
}
try stream.close() catch {
case scala.util.control.NonFatal(e) =>
logger.debug(s"Failed to close output stream because ${e.getMessage})", e)
}
}
}
def writeChunk(chunk: Array[Byte])(implicit stream: ServletOutputStream = response.getOutputStream): Unit = {
Option(chunk).foreach { c =>
stream.write(c)
stream.flush()
}
}
}
| holycattle/skinny-framework | framework/src/main/scala/skinny/controller/feature/ChunkedResponseFeature.scala | Scala | mit | 1,209 |
package com.datawizards.splot.examples.charts.bar
import com.datawizards.splot.api.implicits._
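// Demonstrates hiding the legend on one bar chart, then showing it with a
// custom series name on another.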
object BarChartLegendCustomization extends App {
val data = Seq(1.0, 4.0, 9.0)
data
.buildPlot()
.bar()
.legendVisible(false)
.display()
data
.buildPlot()
.bar()
.seriesName("custom name")
.legendVisible(true)
.display()
}
| piotr-kalanski/SPlot | src/main/scala/com/datawizards/splot/examples/charts/bar/BarChartLegendCustomization.scala | Scala | apache-2.0 | 364 |
package com.andre_cruz
object OptionUtils {
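  /**
   * Descriptive note (added): evaluates `result` only when `predicate` is true and
   * wraps it in an Option (so a null result still becomes None); otherwise returns None.
   *
   * Illustrative (hypothetical) usage, assuming a String `name` in scope:
   * {{{
   * optionIf(name.nonEmpty)(name.trim)  // Some(trimmed) when non-empty, else None
   * }}}
   */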
def optionIf[T](predicate: => Boolean)(result: => T): Option[T] = {
if (predicate)
Option(result)
else
None
}
}
| codecruzer/scala-utils | src/main/scala/com/andre_cruz/OptionUtils.scala | Scala | apache-2.0 | 190 |
/**
* Test cases for ChineseCalendar.scala.
*
* @author Yujian Zhang <yujian{dot}zhang[at]gmail(dot)com>
*
* License:
* GNU General Public License v2
* http://www.gnu.org/licenses/gpl-2.0.html
* Copyright (C) 2014 Yujian Zhang
*/
import net.whily.chinesecalendar.ChineseCalendar._
import net.whily.chinesecalendar.Chinese._
import org.scalatest._
class ChineseCalendarTest extends FunSpec with Matchers {
describe("Test Chinese Calendar calculation") {
it("Check months()") {
months("己未 己丑 戊午 戊子 丁巳 丁亥 丙辰 丙戌 丙辰 乙酉 乙卯 甲申") should === (Array(
Month("一月", "己未"), Month("二月", "己丑"), Month("三月", "戊午"),
Month("四月", "戊子"), Month("五月", "丁巳"), Month("六月", "丁亥"),
Month("七月", "丙辰"), Month("八月", "丙戌"), Month("九月", "丙辰"),
Month("十月", "乙酉"), Month("十一月", "乙卯"), Month("十二月", "甲申")))
months("甲寅 癸未 癸丑 壬午 壬子 辛巳 辛亥 庚辰 閏 庚戌 己卯 己酉 戊寅 戊申") should === (Array(
Month("一月", "甲寅"), Month("二月", "癸未"), Month("三月", "癸丑"),
Month("四月", "壬午"), Month("五月", "壬子"), Month("六月", "辛巳"),
Month("七月", "辛亥"), Month("八月", "庚辰"), Month("閏八月", "庚戌"),
Month("九月", "己卯"), Month("十月", "己酉"), Month("十一月", "戊寅"),
Month("十二月", "戊申")))
months("己卯 閏 戊申 戊寅 戊申 丁丑 丁未 丙子 丙午 乙亥 乙巳 甲戌 甲辰 癸酉") should === (Array(
Month("一月", "己卯"), Month("閏一月", "戊申"), Month("二月", "戊寅"),
Month("三月", "戊申"), Month("四月", "丁丑"), Month("五月", "丁未"),
Month("六月", "丙子"), Month("七月", "丙午"), Month("八月", "乙亥"),
Month("九月", "乙巳"), Month("十月", "甲戌"), Month("十一月", "甲辰"),
Month("十二月", "癸酉")))
months("己亥 己巳 進 戊戌 戊辰 丁酉 丙寅 丙申 乙丑 乙未 甲子 甲午") should === (Array(
Month("一月", "己亥"), Month("二月", "己巳"), Month("四月", "戊戌"),
Month("五月", "戊辰"), Month("六月", "丁酉"), Month("七月", "丙寅"),
Month("八月", "丙申"), Month("九月", "乙丑"), Month("十月", "乙未"),
Month("十一月", "甲子"), Month("十二月", "甲午")))
months("丁亥 丁巳 丙戌 丙辰 乙酉 乙卯 甲申 甲寅 癸未 癸丑 壬午 壬子 後 壬午") should === (Array(
Month("一月", "丁亥"), Month("二月", "丁巳"), Month("三月", "丙戌"),
Month("四月", "丙辰"), Month("五月", "乙酉"), Month("六月", "乙卯"),
Month("七月", "甲申"), Month("八月", "甲寅"), Month("九月", "癸未"),
Month("十月", "癸丑"), Month("十一月", "壬午"), Month("十二月", "壬子"),
Month("後十二月", "壬午")))
months("乙未 甲子 甲午 癸亥 壬辰 壬戌 辛卯 辛酉 庚寅 庚申 己丑 己未 後 戊子", 10) should === (Array(
Month("十月", "乙未"), Month("十一月", "甲子"), Month("十二月", "甲午"),
Month("一月", "癸亥"), Month("二月", "壬辰"), Month("三月", "壬戌"),
Month("四月", "辛卯"), Month("五月", "辛酉"), Month("六月", "庚寅"),
Month("七月", "庚申"), Month("八月", "己丑"), Month("九月", "己未"),
Month("後九月", "戊子")))
months("庚辰 己酉 己卯 戊申 戊寅 丁未 丙子 丙午 乙亥 甲辰 甲戌 甲辰", 1, "武周") should === (Array(
Month("正月", "庚辰"), Month("臘月", "己酉"), Month("一月", "己卯"),
Month("二月", "戊申"), Month("三月", "戊寅"), Month("四月", "丁未"),
Month("五月", "丙子"), Month("六月", "丙午"), Month("七月", "乙亥"),
Month("八月", "甲辰"), Month("九月", "甲戌"), Month("十月", "甲辰")))
months("壬午 辛亥 辛巳 辛亥 庚辰 庚戌 己卯 己酉 戊寅 丁未 丁丑 丙午 丙子 乙巳", 5, "唐肅宗") should === (Array(
Month("建子月", "壬午"), Month("建丑月", "辛亥"), Month("建寅月", "辛巳"),
Month("建卯月", "辛亥"), Month("建辰月", "庚辰"), Month("建巳月", "庚戌"),
Month("五月", "己卯"), Month("六月", "己酉"), Month("七月", "戊寅"),
Month("八月", "丁未"), Month("九月", "丁丑"), Month("十月", "丙午"),
Month("十一月", "丙子"), Month("十二月", "乙巳")))
}
it("Check parseDate().") {
parseDate("漢武帝元朔六年") should be (parseDate("漢武帝元朔六年十月初一"))
}
it("Check toDate().") {
// Check dates according to 三千五百年历日天象 (张培瑜 著)
toDate("周桓王元年") should be (date(-719, 12, 12))
toDate("周赧王元年") should be (date(-314, 12, 6))
toDate("周赧王四十五年") should be (date(-270, 11, 30))
toDate("周赧王五十九年十一月") should be (date(-255, 10, 16))
// Check dates according to 三千五百年历日天象 (张培瑜 著)
toDate("魯隱公元年") should be (date(-721, 1,16))
      // The following are the solar eclipses recorded in the 《春秋》 (Spring and Autumn Annals) that carry an explicit day sexagenary cycle and have been verified, as listed in Table 3-1 of 《中國古代曆法》.
toDate("魯隱公三年二月己巳") should be (date(-719, 2, 22))
toDate("魯桓公三年七月壬辰") should be (date(-708, 7, 17))
toDate("魯莊公二十五年六月辛未") should be (date(-668, 5, 27))
toDate("魯莊公二十六年十二月癸亥") should be (date(-667, 11, 10))
toDate("魯莊公三十年九月庚午") should be (date(-663, 8, 28))
toDate("魯僖公五年九月戊申") should be (date(-654, 8, 19))
toDate("魯僖公十二年三月庚午") should be (date(-647, 4, 6))
toDate("魯文公元年二月癸亥") should be (date(-625, 2, 3))
toDate("魯文公十五年六月辛丑") should be (date(-611, 4, 28))
toDate("魯宣公十年四月丙辰") should be (date(-598, 3, 6))
toDate("魯成公十六年六月丙寅") should be (date(-574, 5, 9))
toDate("魯成公十七年十二月丁巳") should be (date(-573, 10, 22))
toDate("魯襄公十四年二月乙未") should be (date(-558, 1, 14))
toDate("魯襄公二十一年九月庚戌") should be (date(-551, 8, 20))
toDate("魯襄公二十三年二月癸酉") should be (date(-549, 1, 5))
toDate("魯襄公二十四年七月甲子") should be (date(-548, 6, 19))
toDate("魯襄公二十七年十二月乙亥") should be (date(-545, 10, 13))
toDate("魯昭公七年四月甲辰") should be (date(-534, 3, 18))
toDate("魯昭公十五年六月丁巳") should be (date(-526, 4, 18))
toDate("魯昭公二十一年七月壬午") should be (date(-520, 6, 10))
toDate("魯昭公二十二年十二月癸酉") should be (date(-519, 11, 23))
toDate("魯昭公二十四年五月乙未") should be (date(-517, 4, 9))
toDate("魯昭公三十一年十二月辛亥") should be (date(-510, 11, 14))
toDate("魯定公五年三月辛亥") should be (date(-504, 2, 16))
toDate("魯定公十二年十一月丙寅") should be (date(-497, 9, 22))
toDate("魯定公十五年八月庚辰") should be (date(-494, 7, 22))
toDate("魯哀公十四年五月庚申") should be (date(-480, 4, 19))
// Check dates according to 三千五百年历日天象 (张培瑜 著)
toDate("秦昭襄王元年十一月") should be (date(-306, 12, 8))
      // TODO: accession of 秦孝文王
// TODO: toDate("秦孝文王元年十月己亥") should be (date(-250, 11, 12))
toDate("秦孝文王元年十一月") should be (date(-250, 12, 19))
// Check historical dates from zh.wikipedia.org.
      // Accession of 秦王政
toDate("秦莊襄王三年五月丙午") should be (date(-246, 7, 6))
      // Death of 秦始皇 (http://blog.sina.com.cn/s/blog_50823c400100iqqw.html)
toDate("秦始皇三十七年八月丙寅") should be (date(-209, 9, 10))
      // Death of 漢高祖
toDate("漢高祖十二年四月甲辰") should be (date(-194, 6, 1))
      // Death of 漢惠帝
toDate("漢惠帝七年八月戊寅") should be (date(-187, 9, 26))
      // Death of 漢高后
toDate("漢高后八年七月辛巳") should be (date(-179, 8, 18))
      // Death of 漢文帝
toDate("漢文帝後七年六月己亥") should be (date(-156, 7, 6))
      // Death of 漢景帝
toDate("漢景帝後三年正月甲子") should be (date(-140, 3, 9))
      // Accession of 漢昭帝
toDate("漢武帝後元二年二月戊辰") should be (date(-86, 3, 30))
      // Death of 漢昭帝
toDate("漢昭帝元平元年四月癸未") should be (date(-73, 6, 5))
      // Accession of 昌邑王
toDate("漢昭帝元平元年六月丙寅") should be (date(-73, 7, 18))
      // Deposition of 昌邑王
toDate("漢昭帝元平元年六月癸巳") should be (date(-73, 8, 14))
      // Accession of 漢宣帝
toDate("漢昭帝元平元年七月庚申") should be (date(-73, 9, 10))
      // Death of 漢宣帝
toDate("漢宣帝黃龍元年十二月甲戌") should be (date(-47, 1, 10))
      // Accession of 漢平帝
toDate("漢哀帝元壽二年九月辛酉") should be (date(0, 10, 17))
// Test for calendar system change.
toDate("漢武帝太初元年十月朔") should be (date(-104, 11, 26))
toDate("漢武帝太初元年十月初二") should be (date(-104, 11, 27))
// Check based on book tables.
toDate("漢平帝元始元年") should be (date(1, 2, 12))
toDate("漢平帝元始二年") should be (date(2, 2, 2))
toDate("漢平帝元始三年") should be (date(3, 2, 21))
toDate("漢平帝元始元年一月朔") should be (date(1, 2, 12))
toDate("漢平帝元始元年正月朔") should be (date(1, 2, 12))
toDate("漢平帝元始元年正月初二") should be (date(1, 2, 13))
toDate("漢平帝元始元年一月十一") should be (date(1, 2, 22))
toDate("漢平帝元始元年一月晦") should be (date(1, 3, 13))
toDate("漢平帝元始元年二月朔") should be (date(1, 3, 14))
toDate("漢平帝元始元年二月十一") should be (date(1, 3, 24))
toDate("漢平帝元始元年二月己丑") should be (date(1, 3, 14))
toDate("漢平帝元始元年二月己亥") should be (date(1, 3, 24))
toDate("漢平帝元始元年二月己酉") should be (date(1, 4, 3))
toDate("漢平帝元始元年春二月己酉") should be (date(1, 4, 3))
toDate("漢平帝元始元年三月廿一") should be (date(1, 5, 2))
toDate("漢平帝元始四年二月十一") should be (date(4, 3, 20))
toDate("魏明帝景初元年四月初一") should be (date(237, 4, 13))
toDate("魏明帝景初三年後十二月初一") should be (date(240, 1, 12))
toDate("晉武帝咸寧元年") should be (date(275, 2, 13))
// Additional tests based on bug fixes.
toDate("魏陳留王咸熙元年五月") should be (date(264, 6, 12))
// Test for 唐武后
toDate("唐武后載初元年正月") should be (date(689, 12, 18))
toDate("唐武后天授三年臘月") should be (date(691, 12, 25))
toDate("唐武后證聖元年閏二月") should be (date(695, 3, 21))
toDate("唐武后久視元年十一月") should be (date(700, 12, 15))
toDate("唐武后大足元年正月") should be (date(701, 2, 13))
// Test for 載
toDate("唐玄宗天寶二年十二月三十") should be (date(744, 1, 19))
toDate("唐玄宗天寶三載正月") should be (date(744, 1, 20))
toDate("唐肅宗至德三載正月晦") should be (date(758, 3, 13))
toDate("唐肅宗乾元元年二月") should be (date(758, 3, 14))
// Test for 唐肅宗
toDate("唐肅宗元年建寅月初一") should be (date(762, 1, 30))
toDate("公元一九五一年二月初一") should be (date(1951, 3, 8))
}
it("Check fromDate().") {
fromDate(date(1, 2, 12)) should === (List("漢平帝元始元年正月初一"))
fromDate(date(1, 2, 22)) should === (List("漢平帝元始元年正月十一"))
fromDate(date(237, 4, 13)) should === (List("魏明帝景初元年四月初一", "蜀後主建興十五年三月初一", "吳大帝嘉禾六年三月初一"))
// Check DynastyOrder.
fromDate(date(543, 5, 6)) should === (List("梁武帝大同九年三月十七", "西魏文帝大統九年三月十七", "東魏孝靜帝武定元年三月十七"))
// Test for 唐武后
fromDate(date(689, 12, 18)) should === (List("唐武后載初元年正月初一"))
// Test for 載
fromDate(date(744, 1, 19)) should === (List("唐玄宗天寶二年十二月三十"))
fromDate(date(744, 1, 20)) should === (List("唐玄宗天寶三載正月初一"))
fromDate(date(758, 3, 13)) should === (List("唐肅宗至德三載正月廿九"))
fromDate(date(758, 3, 14)) should === (List("唐肅宗乾元元年二月初一"))
}
it("Check monthLength().") {
monthLength("漢武帝太初元年十月") should be (29)
monthLength("漢武帝太初元年九月") should be (29)
monthLength("漢武帝太初二年九月") should be (30)
monthLength("漢平帝元始元年正月") should be (30)
monthLength("漢平帝元始元年二月") should be (29)
monthLength("漢平帝元始元年十二月") should be (30)
monthLength("魏高貴鄉公甘露元年十二月") should be (29)
monthLength("吳會稽王太平元年十二月") should be (30)
}
it("Check plusDays().") {
// Within same month.
parseDate("漢平帝元始元年二月十一").plusDays(10) should be (parseDate("漢平帝元始元年二月廿一"))
parseDate("漢平帝元始元年二月己丑").plusDays(11) should be (parseDate("漢平帝元始元年二月庚子"))
parseDate("漢平帝元始元年二月廿一").plusDays(-10) should be (parseDate("漢平帝元始元年二月十一"))
parseDate("漢平帝元始元年二月庚子").plusDays(-11) should be (parseDate("漢平帝元始元年二月己丑"))
parseDate("漢平帝元始元年二月晦").plusDays(-10) should be (parseDate("漢平帝元始元年二月十九"))
// Different months, positive input.
parseDate("漢平帝元始元年").plusDays(30) should be (parseDate("漢平帝元始元年二月初一"))
parseDate("漢平帝元始元年二月初一").plusDays(29) should be (parseDate("漢平帝元始元年三月初一"))
parseDate("漢平帝元始元年二月初一").plusDays(60) should be (parseDate("漢平帝元始元年四月初二"))
parseDate("漢平帝元始元年十二月初一").plusDays(31) should be (parseDate("漢平帝元始二年一月初二"))
parseDate("漢平帝元始元年二月己丑").plusDays(30) should be (parseDate("漢平帝元始元年三月初二"))
// Diferent months, negative input.
parseDate("漢平帝元始元年二月初一").plusDays(-30) should be (parseDate("漢平帝元始元年一月初一"))
parseDate("漢平帝元始元年三月初一").plusDays(-29) should be (parseDate("漢平帝元始元年二月初一"))
parseDate("漢平帝元始元年四月初二").plusDays(-60) should be (parseDate("漢平帝元始元年二月初一"))
parseDate("漢平帝元始二年一月初二").plusDays(-31) should be (parseDate("漢平帝元始元年十二月初一"))
parseDate("漢平帝元始元年三月初二").plusDays(-30) should be (parseDate("漢平帝元始元年二月初一"))
parseDate("漢平帝元始元年").plusDays(-1) should be (parseDate("漢哀帝元壽二年十二月廿九"))
}
it("Check containingSegments().") {
val eraSegment1 = EraSegment("漢哀帝元壽", parseDate("漢哀帝元壽元年正月初一"),
date(1, 2, 11), "漢哀帝建平", "漢平帝元始")
containingSegments(date(1, 2, 3)) should be (List(eraSegment1))
containingSegment(parseDate("漢哀帝元壽元年正月初一")) should be (Some(eraSegment1))
val eraSegment2 = EraSegment("魏明帝青龍", parseDate("魏明帝青龍元年二月初一"),
date(237, 4, 12), "魏明帝太和", "魏明帝景初")
val eraSegment3 = EraSegment("蜀後主建興", parseDate("蜀後主建興元年五月初一"),
date(238, 2, 1), "蜀昭烈帝章武", "蜀後主延熙")
val eraSegment4 = EraSegment("吳大帝嘉禾", parseDate("吳大帝嘉禾元年正月初一"),
date(238, 8, 27), "吳大帝黃龍", "吳大帝赤烏")
containingSegments(date(234, 5, 6)) should be (List(eraSegment2, eraSegment3, eraSegment4))
}
it("Check firstDayNextMonth().") {
parseDate("漢惠帝七年九月初八").firstDayNextMonth(false) should be (parseDate("漢高后元年十月初一"))
parseDate("漢武帝元朔五年後九月十二").firstDayNextMonth(false) should be (parseDate("漢武帝元朔六年十月初一"))
parseDate("漢武帝元朔六年九月十四").firstDayNextMonth(false) should be (parseDate("漢武帝元狩元年十月初一"))
parseDate("漢武帝元封六年後九月十七").firstDayNextMonth(false) should be (parseDate("漢武帝太初元年十月初一"))
parseDate("漢平帝元始元年二月己丑").firstDayNextMonth(false) should be (parseDate("漢平帝元始元年三月初一"))
parseDate("蜀昭烈帝章武三年四月初二").firstDayNextMonth(false) should be (parseDate("蜀後主建興元年五月初一"))
parseDate("蜀後主炎興元年十一月十一").firstDayNextMonth(false) should be (parseDate("魏陳留王景元四年十二月初一"))
parseDate("隋文帝開皇二十年十二月朔").firstDayNextMonth(false) should be (parseDate("隋文帝仁壽元年一月初一"))
// Test for 載.
parseDate("唐玄宗天寶二年十二月三十").firstDayNextMonth(false).year should be ("三載")
parseDate("唐肅宗至德三載正月初一").firstDayNextMonth(false).year should be ("一年")
}
it("Check lastDayPrevMonth().") {
parseDate("漢平帝元始元年三月初一").lastDayPrevMonth(false) should be (parseDate("漢平帝元始元年二月廿九"))
parseDate("蜀昭烈帝章武元年四月").lastDayPrevMonth(false) should be (parseDate("魏文帝黃初二年三月三十"))
parseDate("蜀後主建興元年五月初一").lastDayPrevMonth(false) should be (parseDate("蜀昭烈帝章武三年四月廿九"))
}
it("Check sameDayNextMonth().") {
parseDate("漢惠帝七年九月初八").sameDayNextMonth() should be (parseDate("漢高后元年十月初八"))
parseDate("漢惠帝七年九月三十").sameDayNextMonth() should be (parseDate("漢高后元年十月晦"))
checkSameDayNextMonth()
}
it("Check sameDayPrevMonth().") {
parseDate("漢惠帝七年九月初八").sameDayPrevMonth() should be (parseDate("漢惠帝七年八月初八"))
parseDate("漢惠帝七年九月三十").sameDayPrevMonth() should be (parseDate("漢惠帝七年八月晦"))
checkSameDayPrevMonth()
}
it("Check sameDayNextYear().") {
parseDate("漢惠帝七年九月初八").sameDayNextYear() should be (parseDate("漢高后元年九月初八"))
parseDate("漢惠帝七年九月三十").sameDayNextYear() should be (parseDate("漢高后元年九月晦"))
parseDate("漢武帝元封六年九月十四").sameDayNextYear() should be (parseDate("漢武帝太初元年九月十四"))
parseDate("漢武帝元封六年後九月十七").sameDayNextYear() should be (parseDate("漢武帝太初元年九月十七"))
parseDate("漢武帝太初元年九月十七").sameDayNextYear() should be (parseDate("漢武帝太初二年九月十七"))
parseDate("魏明帝青龍四年三月十七").sameDayNextYear() should be (parseDate("魏明帝景初元年四月十七"))
parseDate("魏明帝青龍五年一月初八").sameDayNextYear() should be (parseDate("魏明帝景初二年一月初八"))
parseDate("魏明帝青龍五年二月初七").sameDayNextYear() should be (parseDate("魏明帝景初二年二月初七"))
checkSameDayNextYear()
}
it("Check sameDayPrevYear().") {
parseDate("漢高后元年九月初八").sameDayPrevYear() should be (parseDate("漢惠帝七年九月初八"))
parseDate("漢高后元年九月晦").sameDayPrevYear() should be (parseDate("漢惠帝七年九月晦"))
parseDate("漢武帝太初元年九月十四").sameDayPrevYear() should be (parseDate("漢武帝元封六年九月十四"))
parseDate("漢武帝太初元年後十月十七").sameDayPrevYear() should be (parseDate("漢武帝太初元年十月十七"))
parseDate("漢武帝太初二年九月十七").sameDayPrevYear() should be (parseDate("漢武帝太初元年九月十七"))
parseDate("魏明帝景初元年四月十七").sameDayPrevYear() should be (parseDate("魏明帝青龍四年四月十七"))
parseDate("魏明帝景初二年一月初八").sameDayPrevYear() should be (parseDate("魏明帝青龍五年一月初八"))
parseDate("魏明帝景初二年二月初七").sameDayPrevYear() should be (parseDate("魏明帝青龍五年二月初七"))
parseDate("魏明帝景初二年三月初七").sameDayPrevYear() should be (parseDate("魏明帝青龍五年二月初七"))
checkSameDayPrevYear()
}
it("Check yearSexagenary().") {
// BCEYears
parseDate("秦昭襄王五十四年").yearSexagenary() should be ("戊申")
parseDate("漢武帝征和元年").yearSexagenary() should be ("己丑")
// CEYears
parseDate("漢平帝元始二年二月晦").yearSexagenary() should be ("壬戌")
parseDate("晉穆帝永和九年三月初三").yearSexagenary() should be ("癸丑")
// ShuYears
parseDate("蜀後主景耀三年").yearSexagenary() should be ("庚辰")
// WuYears
parseDate("吳會稽王太平三年").yearSexagenary() should be ("戊寅")
}
it("Check sexagenaries().") {
sexagenaries("甲子", 3).mkString(" ") should be ("甲子 乙丑 丙寅")
// Test wrap around.
sexagenaries("辛酉", 5).mkString(" ") should be ("辛酉 壬戌 癸亥 甲子 乙丑")
}
it("Check nextCharacter().") {
nextCharacter("公元前842年") should === (Array("1"))
nextCharacter("公元前842年1") should === (Array("2"))
nextCharacter("公元前842年12") should === (Array("月"))
nextCharacter("公元前842年12月1") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9"))
nextCharacter("公元前5") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "年"))
nextCharacter("公元前5年") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9"))
nextCharacter("公元前5年1") should === (Array("1", "2", "0", "月"))
nextCharacter("公元前5年2") should === (Array("月"))
nextCharacter("公元前5年10") should === (Array("月"))
nextCharacter("公元前5年2月") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9"))
nextCharacter("公元前5年2月1") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "日"))
nextCharacter("公元前5年2月12") should === (Array("日"))
nextCharacter("公元前5年2月2") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "日"))
nextCharacter("353年") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9"))
nextCharacter("353年1") should === (Array("1", "2", "0", "月"))
nextCharacter("353年2") should === (Array("月"))
nextCharacter("353年10") should === (Array("月"))
nextCharacter("353年2月") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9"))
nextCharacter("353年2月1") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "日"))
nextCharacter("353年2月12") should === (Array("日"))
nextCharacter("353年2月2") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "0", "日"))
nextCharacter("353年2月3") should === (Array("日"))
nextCharacter("353年3月3") should === (Array("1", "0", "日"))
nextCharacter("353年4月3") should === (Array("0", "日"))
nextCharacter("1582年10月") should === (Array("1", "2", "3", "4"))
nextCharacter("1582年10月1") should === (Array("5", "6", "7", "8", "9", "日"))
nextCharacter("2101年") should === (Array("1"))
nextCharacter("2101年1") should === (Array("月"))
nextCharacter("2101年1月2") should === (Array("1", "2", "3", "4", "5", "6", "7", "8", "0", "日"))
nextCharacter("秦王政") should === (Array("元", "二", "三", "四", "五", "六", "七", "八", "九", "十"))
nextCharacter("秦王政元") should === (Array("年"))
nextCharacter("秦王政十") should === (Array("一", "二", "三", "四", "五", "六", "七", "八", "九", "年"))
nextCharacter("秦王政二") should === (Array("十", "年"))
nextCharacter("秦王政二十") should === (Array("一", "二", "三", "四", "五", "年"))
nextCharacter("秦始皇") should === (Array("二", "三"))
nextCharacter("秦始皇二十") should === (Array("六", "七", "八", "九"))
nextCharacter("秦始皇三十") should === (Array("一", "二", "三", "四", "五", "六", "七", "年"))
nextCharacter("秦始皇三十六年") should === (Array("正", "二", "三", "四", "五", "六", "七", "八", "九", "十"))
nextCharacter("秦始皇三十六年十") should === (Array("一", "二", "月"))
nextCharacter("秦始皇三十六年十二") should === (Array("月"))
nextCharacter("秦始皇三十六年十二月") should === (Array("甲", "乙", "丙", "丁", "戊", "己", "庚", "辛", "壬", "癸", "初", "十", "二", "廿", "三", "朔", "晦"))
nextCharacter("秦始皇三十六年十二月初") should === (Array("一", "二", "三", "四", "五", "六", "七", "八", "九", "十"))
nextCharacter("秦始皇三十六年十二月朔") should === (Array(""))
nextCharacter("漢獻帝中平六年") should === (Array("十", "閏"))
nextCharacter("漢獻帝中平六年十") should === (Array("二"))
nextCharacter("漢獻帝中平六年十二月") should === (Array("甲", "乙", "丙", "丁", "戊", "己", "庚", "辛", "壬", "癸", "初", "十", "二", "廿", "朔", "晦"))
// Check the order of results.
nextCharacter("唐").startsWith(Array("高", "太")) should be (true)
// Check the handling of 正月 and 臘月.
nextCharacter("唐武后載初元年") should === (Array("一", "二", "三", "四", "五", "六", "七", "八", "正", "臘"))
nextCharacter("唐武后長安二年") should === (Array("正", "二", "三", "四", "五", "六", "七", "八", "九", "十"))
// Check the handling of 載.
nextCharacter("唐肅宗至德三") should === (Array("載"))
nextCharacter("唐肅宗至德三載") should === (Array("正"))
nextCharacter("唐武后載") should === (Array("初"))
nextCharacter("唐武后延載") should === (Array("元"))
// Check the handling of 唐肅宗.
nextCharacter("唐肅宗") should === (Array("至", "乾", "上", "寶", "元"))
nextCharacter("唐肅宗寶") should === (Array("應"))
nextCharacter("唐肅宗寶應") should === (Array("元", "二"))
// Check for 民國
nextCharacter("民") should === (Array("國"))
nextCharacter("民國") should === (Array("元", "二", "三", "四", "五", "六", "七", "八", "九", "十"))
// Check for 公元
nextCharacter("公元一") should === (Array("九"))
nextCharacter("公元一九") should === (Array("四", "五", "六", "七", "八", "九"))
nextCharacter("公元一九四") should === (Array("九"))
//nextCharacter("公元二") should === (Array("〇", "一"))
//nextCharacter("公元二〇") should === (Array("〇", "一", "二", "三", "四", "五", "六", "七", "八", "九"))
//nextCharacter("公元二一") should === (Array("〇"))
for (era <- eraNames()) {
for (i <- 1 to era.length) {
val next = nextCharacter(era.substring(0, i))
next.length should be >= 1
next(0) should have length 1
}
}
}
it("Check simplified2Traditional2().") {
simplified2Traditional2("遼道宗咸") should be ("遼道宗咸")
simplified2Traditional2("光绪") should be ("光緒")
simplified2Traditional2("光緒") should be ("光緒")
}
it("Check data sanity.") {
sanityCheck should be (true)
}
it("Check Simplified/Traditional Chinese conversion of era names.") {
for (eraName <- eraNames()) {
simplified2Traditional(traditional2Simplified(eraName)) should be (eraName)
}
}
it("Check conversion for every day.") {
checkEveryDay() should be (true)
}
}
}
| whily/chinesecalendar | src/test/scala/net/whily/chinesecalendar/ChineseCalendarTest.scala | Scala | gpl-2.0 | 29,269 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler.rate
import org.apache.spark.SparkConf
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.StreamingConf._
/**
* A component that estimates the rate at which an `InputDStream` should ingest
* records, based on updates at every batch completion.
*
* Please see `org.apache.spark.streaming.scheduler.RateController` for more details.
*/
private[streaming] trait RateEstimator extends Serializable {
/**
* Computes the number of records the stream attached to this `RateEstimator`
* should ingest per second, given an update on the size and completion
* times of the latest batch.
*
* @param time The timestamp of the current batch interval that just finished
* @param elements The number of records that were processed in this batch
* @param processingDelay The time in ms that took for the job to complete
* @param schedulingDelay The time in ms that the job spent in the scheduling queue
*/
def compute(
time: Long,
elements: Long,
processingDelay: Long,
schedulingDelay: Long): Option[Double]
}
object RateEstimator {
/**
* Return a new `RateEstimator` based on the value of
* `spark.streaming.backpressure.rateEstimator`.
*
* The only known and acceptable estimator right now is `pid`.
*
* @return An instance of RateEstimator
* @throws IllegalArgumentException if the configured RateEstimator is not `pid`.
*/
def create(conf: SparkConf, batchInterval: Duration): RateEstimator =
conf.get(BACKPRESSURE_RATE_ESTIMATOR) match {
case "pid" =>
val proportional = conf.get(BACKPRESSURE_PID_PROPORTIONAL)
val integral = conf.get(BACKPRESSURE_PID_INTEGRAL)
val derived = conf.get(BACKPRESSURE_PID_DERIVED)
val minRate = conf.get(BACKPRESSURE_PID_MIN_RATE)
new PIDRateEstimator(batchInterval.milliseconds, proportional, integral, derived, minRate)
case estimator =>
throw new IllegalArgumentException(s"Unknown rate estimator: $estimator")
}
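  // Illustrative (hypothetical) call site: RateEstimator.create(sparkConf, Seconds(2))
  // returns a PIDRateEstimator when the configured estimator is "pid".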
}
| shaneknapp/spark | streaming/src/main/scala/org/apache/spark/streaming/scheduler/rate/RateEstimator.scala | Scala | apache-2.0 | 2,873 |
package com.twitter.finagle.tracing
/**
* This is a tracing system similar to Dapper:
*
* “Dapper, a Large-Scale Distributed Systems Tracing Infrastructure”,
* Benjamin H. Sigelman, Luiz André Barroso, Mike Burrows, Pat
* Stephenson, Manoj Plakal, Donald Beaver, Saul Jaspan, Chandan
* Shanbhag, 2010.
*
* It is meant to be independent of whatever underlying RPC mechanism
* is being used, and it is up to the underlying codec to implement
* the transport.
*/
import com.twitter.finagle.Init
import com.twitter.util.{Future, Duration, Time, Local, Stopwatch, Try, Throw, Return}
import java.net.InetSocketAddress
import java.util.UUID
import scala.util.Random
import com.twitter.app.GlobalFlag
import com.twitter.finagle.context.Contexts
import com.twitter.io.Buf
import com.twitter.finagle.util.ByteArrays
object debugTrace extends GlobalFlag(false, "Print all traces to the console.")
/**
* `Trace` maintains the state of the tracing stack
* The current `TraceId` has a terminal flag, indicating whether it
* can be overridden with a different `TraceId`. Setting the current
* `TraceId` as terminal forces all future annotations to share that
* `TraceId`.
* When reporting, we report to all tracers in the list of `Tracer`s.
*/
object Trace {
private case class TraceCtx(terminal: Boolean, tracers: List[Tracer]) {
def withTracer(tracer: Tracer) = copy(tracers=tracer :: this.tracers)
def withTerminal(terminal: Boolean) =
if (terminal == this.terminal) this
else copy(terminal=terminal)
}
private object TraceCtx {
val empty = TraceCtx(false, Nil)
}
private val traceCtx = new Contexts.local.Key[TraceCtx]
private val someTrue = Some(true)
private[finagle] val idCtx = new Contexts.broadcast.Key[TraceId]("com.twitter.finagle.tracing.TraceContext") {
private val local = new ThreadLocal[Array[Byte]] {
override def initialValue() = new Array[Byte](56)
}
def marshal(id: TraceId) =
Buf.ByteArray.Owned(TraceId.serialize(id))
/**
* The wire format is (big-endian):
* ''spanId:16 parentId:16 traceId:16 flags:8''
*/
def tryUnmarshal(body: Buf): Try[TraceId] = {
if (body.length != 56)
return Throw(new IllegalArgumentException("Expected 56 bytes"))
val bytes = local.get()
body.write(bytes, 0)
val span64 = ByteArrays.get128be(bytes, 0)
val parent64 = ByteArrays.get128be(bytes, 16)
val trace64 = ByteArrays.get128be(bytes, 32)
val flags64 = ByteArrays.get128be(bytes, 48)
val flags = Flags(flags64.toLong)
val sampled = if (flags.isFlagSet(Flags.SamplingKnown)) {
Some(flags.isFlagSet(Flags.Sampled))
} else None
val traceId = TraceId(
if (trace64 == parent64) None else Some(SpanId(trace64)),
if (parent64 == span64) None else Some(SpanId(parent64)),
SpanId(span64),
sampled,
flags)
Return(traceId)
}
}
private def random: String = UUID.randomUUID().toString
private val defaultId = TraceId(None, None, SpanId(random), None, Flags())
@volatile private var tracingEnabled = true
private def ctx: TraceCtx = Contexts.local.get(traceCtx) match {
case Some(ctx) => ctx
case None => TraceCtx.empty
}
/**
* True if there is an identifier for the current trace.
*/
def hasId: Boolean = Contexts.broadcast.contains(idCtx)
/**
* Get the current trace identifier. If no identifiers have been
* pushed, a default one is provided.
*/
def id: TraceId =
if (hasId) Contexts.broadcast(idCtx) else defaultId
/**
* Get the current identifier, if it exists.
*/
def idOption: Option[TraceId] =
Contexts.broadcast.get(idCtx)
/**
* @return true if the current trace id is terminal
*/
def isTerminal: Boolean = ctx.terminal
/**
* @return the current list of tracers
*/
def tracers: List[Tracer] = ctx.tracers
/**
* Turn trace recording on.
*/
def enable() = tracingEnabled = true
/**
* Turn trace recording off.
*/
def disable() = tracingEnabled = false
/**
* Create a derived id from the current TraceId.
*/
def nextId: TraceId = {
val currentId = idOption
TraceId(currentId map { _.traceId },
currentId map { _.spanId },
SpanId(random),
currentId map { _.sampled } getOrElse None,
currentId map { _.flags} getOrElse Flags())
}
/**
* Run computation `f` with the given traceId.
*
* @param traceId the TraceId to set as the current trace id
* @param terminal true if traceId is a terminal id. Future calls to set() after a terminal
* id is set will not set the traceId
*/
def letId[R](traceId: TraceId, terminal: Boolean = false)(f: => R): R = {
if (isTerminal) f
else if (terminal) {
Contexts.local.let(traceCtx, ctx.withTerminal(terminal)) {
Contexts.broadcast.let(idCtx, traceId)(f)
}
} else Contexts.broadcast.let(idCtx, traceId)(f)
}
/**
   * A version of [[com.twitter.finagle.tracing.Trace.letId]] providing an
* optional ID. If the argument is None, the computation `f` is run without
* altering the trace environment.
*/
def letIdOption[R](traceIdOpt: Option[TraceId])(f: => R): R =
traceIdOpt match {
case Some(traceId) => letId(traceId)(f)
case None => f
}
/**
* Run computation `f` with `tracer` added onto the tracer stack.
*/
def letTracer[R](tracer: Tracer)(f: => R): R =
Contexts.local.let(traceCtx, ctx.withTracer(tracer))(f)
/**
* Run computation `f` with the given tracer, and a derivative TraceId.
* The implementation of this function is more efficient than calling
* letTracer, nextId and letId sequentially as it minimizes the number
* of request context changes.
*
* @param tracer the tracer to be pushed
* @param terminal true if the next traceId is a terminal id. Future
* attempts to set nextId will be ignored.
*/
def letTracerAndNextId[R](tracer: Tracer, terminal: Boolean = false)(f: => R): R =
letTracerAndId(tracer, nextId, terminal)(f)
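  // Illustrative (hypothetical) usage of the API above; `myTracer` and the
  // recorded values are placeholders, not part of this file:
  //   Trace.letTracerAndNextId(myTracer) {
  //     Trace.recordRpc("getUser")
  //     Trace.recordBinary("userId", 42)
  //     // ... perform the traced work ...
  //   }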
/**
* Run computation `f` with the given tracer and trace id.
*
* @param terminal true if the next traceId is a terminal id. Future
* attempts to set nextId will be ignored.
*/
def letTracerAndId[R](tracer: Tracer, id: TraceId, terminal: Boolean = false)(f: => R): R = {
if (ctx.terminal) {
letTracer(tracer)(f)
} else {
val newCtx = ctx.withTracer(tracer).withTerminal(terminal)
val newId = id.sampled match {
case None => id.copy(_sampled = tracer.sampleTrace(id))
case Some(_) => id
}
Contexts.local.let(traceCtx, newCtx) {
Contexts.broadcast.let(idCtx, newId)(f)
}
}
}
/**
* Run computation `f` with all tracing state (tracers, trace id)
* cleared.
*/
def letClear[R](f: => R): R =
Contexts.local.letClear(traceCtx) {
Contexts.broadcast.letClear(idCtx) {
f
}
}
/**
* Convenience method for event loops in services. Put your
* service handling code inside this to get proper tracing with all
* the correct fields filled in.
*/
def traceService[T](service: String, rpc: String, hostOpt: Option[InetSocketAddress]=None)(f: => T): T = {
Trace.letId(Trace.nextId) {
Trace.recordBinary("finagle.version", Init.finagleVersion)
Trace.recordServiceName(service)
Trace.recordRpc(rpc)
hostOpt map { Trace.recordServerAddr(_) }
Trace.record(Annotation.ServerRecv())
try f finally {
Trace.record(Annotation.ServerSend())
}
}
}
/**
* Returns true if tracing is enabled with a good tracer pushed and the current
* trace is sampled
*/
def isActivelyTracing: Boolean =
tracingEnabled && (id match {
case TraceId(_, _, _, Some(false), flags) if !flags.isDebug => false
case TraceId(_, _, _, _, Flags(Flags.Debug)) => true
case _ =>
tracers.nonEmpty && (tracers.size > 1 || tracers.head != NullTracer)
})
/**
* Record a raw record without checking if it's sampled/enabled/etc.
*/
private[this] def uncheckedRecord(rec: Record) {
tracers.distinct.foreach { t: Tracer => t.record(rec) }
}
/**
* Record a raw ''Record''. This will record to a _unique_ set of
* tracers in the stack.
*/
def record(rec: => Record) {
if (debugTrace())
System.err.println(rec)
if (isActivelyTracing)
uncheckedRecord(rec)
}
/**
* Time an operation and add an annotation with that duration on it
* @param message The message describing the operation
* @param f operation to perform
* @tparam T return type
* @return return value of the operation
*/
def time[T](message: String)(f: => T): T = {
val elapsed = Stopwatch.start()
val rv = f
record(message, elapsed())
rv
}
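  // For example (illustrative only): Trace.time("db.query") { runQuery() }
  // records a Message annotation "db.query" carrying the block's duration.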
/**
* Runs the function f and logs that duration until the future is satisfied with the given name.
*/
def timeFuture[T](message: String)(f: Future[T]): Future[T] = {
val start = Time.now
f.ensure {
record(message, start.untilNow)
}
f
}
/*
* Convenience methods that construct records of different kinds.
*/
def record(ann: Annotation) {
if (debugTrace())
System.err.println(Record(id, Time.now, ann, None))
if (isActivelyTracing)
uncheckedRecord(Record(id, Time.now, ann, None))
}
def record(ann: Annotation, duration: Duration) {
if (debugTrace())
System.err.println(Record(id, Time.now, ann, Some(duration)))
if (isActivelyTracing)
uncheckedRecord(Record(id, Time.now, ann, Some(duration)))
}
def record(message: String) {
record(Annotation.Message(message))
}
def record(message: String, duration: Duration) {
record(Annotation.Message(message), duration)
}
@deprecated("Use recordRpc and recordServiceName", "6.13.x")
def recordRpcname(service: String, rpc: String) {
record(Annotation.Rpcname(service, rpc))
}
def recordServiceName(serviceName: String) {
record(Annotation.ServiceName(serviceName))
}
def recordRpc(name: String) {
record(Annotation.Rpc(name))
}
def recordClientAddr(ia: InetSocketAddress) {
record(Annotation.ClientAddr(ia))
}
def recordServerAddr(ia: InetSocketAddress) {
record(Annotation.ServerAddr(ia))
}
def recordLocalAddr(ia: InetSocketAddress) {
record(Annotation.LocalAddr(ia))
}
def recordBinary(key: String, value: Any) {
record(Annotation.BinaryAnnotation(key, value))
}
def recordBinaries(annotations: Map[String, Any]) {
if (isActivelyTracing) {
for ((key, value) <- annotations) {
recordBinary(key, value)
}
}
}
}
| cogitate/twitter-finagle-uuid | finagle-core/src/main/scala/com/twitter/finagle/tracing/Trace.scala | Scala | apache-2.0 | 10,881 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frsse2008
import play.api.libs.json.Format
import uk.gov.hmrc.ct.accounts.AC14
import uk.gov.hmrc.ct.accounts.frs105.boxes.AC415
import uk.gov.hmrc.ct.accounts.frsse2008.micro._
import uk.gov.hmrc.ct.box.formats.OptionalIntegerFormat
package object formats {
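  // Each accounts box below wraps an optional integer, so every JSON Format is
  // the same OptionalIntegerFormat wiring applied to the box's constructor.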
implicit val ac14Format: Format[AC14] = new OptionalIntegerFormat[AC14](AC14.apply)
implicit val ac15Format: Format[AC15] = new OptionalIntegerFormat[AC15](AC15.apply)
implicit val ac16Format: Format[AC16] = new OptionalIntegerFormat[AC16](AC16.apply)
implicit val ac17Format: Format[AC17] = new OptionalIntegerFormat[AC17](AC17.apply)
implicit val ac18Format: Format[AC18] = new OptionalIntegerFormat[AC18](AC18.apply)
implicit val ac19Format: Format[AC19] = new OptionalIntegerFormat[AC19](AC19.apply)
implicit val ac20Format: Format[AC20] = new OptionalIntegerFormat[AC20](AC20.apply)
implicit val ac21Format: Format[AC21] = new OptionalIntegerFormat[AC21](AC21.apply)
implicit val ac22Format: Format[AC22] = new OptionalIntegerFormat[AC22](AC22.apply)
implicit val ac23Format: Format[AC23] = new OptionalIntegerFormat[AC23](AC23.apply)
implicit val ac26Format: Format[AC26] = new OptionalIntegerFormat[AC26](AC26.apply)
implicit val ac27Format: Format[AC27] = new OptionalIntegerFormat[AC27](AC27.apply)
implicit val ac28Format: Format[AC28] = new OptionalIntegerFormat[AC28](AC28.apply)
implicit val ac29Format: Format[AC29] = new OptionalIntegerFormat[AC29](AC29.apply)
implicit val ac30Format: Format[AC30] = new OptionalIntegerFormat[AC30](AC30.apply)
implicit val ac31Format: Format[AC31] = new OptionalIntegerFormat[AC31](AC31.apply)
implicit val ac32Format: Format[AC32] = new OptionalIntegerFormat[AC32](AC32.apply)
implicit val ac33Format: Format[AC33] = new OptionalIntegerFormat[AC33](AC33.apply)
implicit val ac34Format: Format[AC34] = new OptionalIntegerFormat[AC34](AC34.apply)
implicit val ac35Format: Format[AC35] = new OptionalIntegerFormat[AC35](AC35.apply)
implicit val ac36Format: Format[AC36] = new OptionalIntegerFormat[AC36](AC36.apply)
implicit val ac37Format: Format[AC37] = new OptionalIntegerFormat[AC37](AC37.apply)
implicit val ac38Format: Format[AC38] = new OptionalIntegerFormat[AC38](AC38.apply)
implicit val ac39Format: Format[AC39] = new OptionalIntegerFormat[AC39](AC39.apply)
implicit val ac40Format: Format[AC40] = new OptionalIntegerFormat[AC40](AC40.apply)
implicit val ac41Format: Format[AC41] = new OptionalIntegerFormat[AC41](AC41.apply)
implicit val ac405Format: Format[AC405] = new OptionalIntegerFormat[AC405](AC405.apply)
implicit val ac406Format: Format[AC406] = new OptionalIntegerFormat[AC406](AC406.apply)
implicit val ac410Format: Format[AC410] = new OptionalIntegerFormat[AC410](AC410.apply)
implicit val ac411Format: Format[AC411] = new OptionalIntegerFormat[AC411](AC411.apply)
implicit val ac415Format: Format[AC415] = new OptionalIntegerFormat[AC415](AC415.apply)
implicit val ac416Format: Format[AC416] = new OptionalIntegerFormat[AC416](AC416.apply)
implicit val ac420Format: Format[AC420] = new OptionalIntegerFormat[AC420](AC420.apply)
implicit val ac421Format: Format[AC421] = new OptionalIntegerFormat[AC421](AC421.apply)
implicit val ac425Format: Format[AC425] = new OptionalIntegerFormat[AC425](AC425.apply)
implicit val ac426Format: Format[AC426] = new OptionalIntegerFormat[AC426](AC426.apply)
implicit val ac435Format: Format[AC435] = new OptionalIntegerFormat[AC435](AC435.apply)
implicit val ac436Format: Format[AC436] = new OptionalIntegerFormat[AC436](AC436.apply)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frsse2008/formats/package.scala | Scala | apache-2.0 | 4,219 |
package bulu.actor.build
import akka.actor.Actor
import bulu.util._
import akka.actor.ActorLogging
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.hbase.client.Put
import akka.actor.ActorRef
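/**
 * Descriptive note (added): persists each aggregated cube cell into the
 * cuboid-base HBase table and acknowledges every write to the sinker actor
 * with CellSaved.
 */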
class CellSaver(cube: String, sinker:ActorRef) extends Actor with ActorLogging {
var table: HBase = null
var count=0
override def preStart(): Unit = {
table = new HBase(HBase.getTableName(cube, TableCategory.CuboidBase));
}
override def postStop(): Unit = {
table.close
}
def receive: Receive = {
case SaveCell(agg) =>
count+=1
val maps = for (mea <- agg._2) yield (Convert.measureName2Qualifier(mea._1),
HBase.bigDecimal2Bytes(new java.math.BigDecimal(mea._2.toDouble)))
try{
table.put(Convert.toByteArray(agg._1), Bytes.toBytes(HBase.DefaultFamily), maps)
}catch{
case e:Exception=> log.error("put exception ",e)
}
sinker ! CellSaved
// case CellFinished =>
// table.close
// sender ! SaveCellFinished(count)
}
} | hwzhao/bulu | src/main/scala/bulu/actor/build/CellSaver.scala | Scala | apache-2.0 | 1,060 |
package io.buoyant.consul.v1
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response}
import com.twitter.io.Buf
import com.twitter.util.Future
import io.buoyant.test.Awaits
import org.scalatest.FunSuite
class HealthApiTest extends FunSuite with Awaits {
val nodesBuf = Buf.Utf8("""[{"Node":{"Node":"Sarahs-MBP-2","Address":"192.168.1.37"}, "Service": {"Service":"hosted_web","Tags":["master"],"Port":8084, "Address":""}}]""")
var lastUri = ""
def stubService(buf: Buf) = Service.mk[Request, Response] { req =>
val rsp = Response()
rsp.setContentTypeJson()
rsp.content = buf
rsp.headerMap.set("X-Consul-Index", "4")
lastUri = req.uri
Future.value(rsp)
}
test("serviceNodes endpoint returns a seq of ServiceNodes") {
val service = stubService(nodesBuf)
val response = await(HealthApi(service).serviceNodes("hosted_web")).value
assert(response.size == 1)
assert(response.head.ServiceName == Some("hosted_web"))
assert(response.head.Node == Some("Sarahs-MBP-2"))
assert(response.head.ServiceAddress == Some(""))
assert(response.head.ServicePort == Some(8084))
}
test("serviceNodes endpoint supports consistency parameter") {
val service = stubService(nodesBuf)
val api = HealthApi(service)
await(api.serviceNodes("foo"))
assert(!lastUri.contains("consistent"))
assert(!lastUri.contains("stale"))
await(api.serviceNodes("foo", consistency = Some(ConsistencyMode.Default)))
assert(!lastUri.contains("consistent"))
assert(!lastUri.contains("stale"))
await(api.serviceNodes("foo", consistency = Some(ConsistencyMode.Stale)))
assert(lastUri.contains("stale=true"))
await(api.serviceNodes("foo", consistency = Some(ConsistencyMode.Consistent)))
assert(lastUri.contains("consistent=true"))
}
}
| denverwilliams/linkerd | consul/src/test/scala/io/buoyant/consul/v1/HealthApiTest.scala | Scala | apache-2.0 | 1,839 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.config
import com.twitter.io.TempFile
import com.twitter.ostrich.admin.RuntimeEnvironment
import com.twitter.util.Eval
import com.twitter.zipkin.builder.Builder
import com.twitter.zipkin.collector.ZipkinCollector
import org.specs.Specification
class ConfigSpec extends Specification {
"/config" should {
val eval = new Eval
"validate collector configs" in {
val configFiles = Seq(
//"/collector-dev.scala",
//"/collector-hbase.scala",
"/collector-cassandra.scala"
) map { TempFile.fromResourcePath(_) }
for (file <- configFiles) {
file.getName() in {
val config = eval[Builder[RuntimeEnvironment => ZipkinCollector]](file)
config must notBeNull
config.apply()
}
}
}
}
}
| zerdliu/zipkin | zipkin-collector-service/src/test/scala/com/twitter/zipkin/config/ConfigSpec.scala | Scala | apache-2.0 | 1,411 |
package org.spixi.flink.handson.model
trait TimeEvent[T] {
def timestamp: Long
def value: T
}
| spi-x-i/flink-handson | simple-handson/src/main/scala/org/spixi/flink/handson/model/TimeEvent.scala | Scala | gpl-3.0 | 100 |
package lila.storm
import com.softwaremill.macwire._
import play.api.Configuration
import lila.common.config._
import lila.user.UserRepo
@Module
final class Env(
appConfig: Configuration,
db: lila.db.Db,
colls: lila.puzzle.PuzzleColls,
cacheApi: lila.memo.CacheApi,
userRepo: UserRepo
)(implicit
ec: scala.concurrent.ExecutionContext
) {
private lazy val dayColl = db(CollName("storm_day"))
lazy val selector = wire[StormSelector]
private val signSecret = appConfig.get[Secret]("storm.secret")
lazy val sign = wire[StormSign]
lazy val json = wire[StormJson]
lazy val highApi = wire[StormHighApi]
lazy val dayApi = wire[StormDayApi]
val forms = StormForm
}
| luanlv/lila | modules/storm/src/main/Env.scala | Scala | mit | 707 |
package io.flow.proxy
import io.apibuilder.spec.v0.models.Service
import io.flow.build.{Application, BuildType, DownloadCache}
import io.flow.registry.v0.{Client => RegistryClient}
import play.api.libs.json.Json
case class Controller() extends io.flow.build.Controller {
/**
* Allowlist of applications in the 'api' repo that do not exist in registry
*/
private[this] val ExcludeAllowList = Seq("common", "healthcheck", "usage", "gift-card")
/**
* This is the hostname of the services when running in docker on
* our development machines.
*/
private[this] val DockerHostname = "172.17.0.1"
private[this] val DevelopmentHostname = "localhost"
override val name = "Proxy"
override val command = "proxy"
def buildUserPermissionsFile(
buildType: BuildType,
services: Seq[Service]
): Unit = {
    val routes = services.flatMap(s => s.resources.flatMap(r => r.operations.flatMap { o =>
o.attributes.find(_.name == "auth") match {
case Some(a)=> {
val ts = a.value \\ "techniques"
val rs = a.value \\ "roles"
ts.as[Seq[String]]
.filterNot(_ == "user")
.map(t=>(t, Map("method" -> o.method.toString, "path"-> o.path))) ++
rs.asOpt[Seq[String]]
.map(r=>r.map { t =>
(t, Map("method" -> o.method.toString, "path" -> o.path))
}).getOrElse(Nil)
}
case None => {
Seq(("anonymous", Map("method" -> o.method.toString, "path"-> o.path)))
}
}
}))
    val rs = routes.groupBy(_._1).map(r => (r._1, Map("routes" -> r._2.map(_._2).distinct)))
val m = Json.toJson(rs)
val path = s"/tmp/${buildType}-authorization.json"
writeToFile(path, Json.prettyPrint(m))
println(s" - $path")
}
def run(
buildType: BuildType,
downloadCache: DownloadCache,
allServices: Seq[Service]
) (
implicit ec: scala.concurrent.ExecutionContext
): Unit = {
val services = allServices.
filter { s => s.resources.nonEmpty }.
filterNot { s => ExcludeAllowList.exists(ew => s.name.startsWith(ew)) }
val serviceHostResolver = ServiceHostResolver(allServices)
val version = downloadCache.downloadService(Application.latest("flow", buildType.key)) match {
case Left(error) => sys.error(s"Failed to download '$buildType' service from API Builder: $error")
case Right(svc) => svc.version
}
println("Building authorization from: " + services.map(_.name).mkString(", "))
buildUserPermissionsFile(buildType, services)
println("Building proxy from: " + services.map(_.name).mkString(", "))
val registryClient = new RegistryClient()
try {
buildProxyFile(buildType, services, version, "production") { service =>
s"https://${serviceHostResolver.host(service.name)}.api.flow.io"
}
val cache = RegistryApplicationCache(registryClient)
def externalPort(service: Service): Long = cache.externalPort(
registryName = serviceHostResolver.host(service.name),
serviceName = service.name
)
buildProxyFile(buildType, services, version, "development") { service =>
s"http://$DevelopmentHostname:${externalPort(service)}"
}
buildProxyFile(buildType, services, version, "workstation") { service =>
s"http://$DockerHostname:${externalPort(service)}"
}
} finally {
registryClient.closeAsyncHttpClient()
}
}
private[this] def buildProxyFile(
buildType: BuildType,
services: Seq[Service],
version: String,
env: String
) (
hostProvider: Service => String
): Unit = {
services.toList match {
case Nil => {
println(s" - $env: No services - skipping proxy file")
}
case _ => {
val serversYaml = services.map { service =>
Seq(
s"- name: ${service.name}",
s" host: ${hostProvider(service)}"
).mkString("\\n")
}.mkString("\\n")
val operationsYaml = services.flatMap { service =>
service.resources.flatMap(_.operations).map { op =>
Seq(
s"- method: ${op.method.toString.toUpperCase}",
s" path: ${op.path}",
s" server: ${service.name}"
).mkString("\\n")
}
}.mkString("\\n")
val all = s"""version: $version
servers:
${Text.indent(serversYaml, 2)}
operations:
${Text.indent(operationsYaml, 2)}
"""
val path = s"/tmp/${buildType}-proxy.$env.config"
writeToFile(path, all)
println(s" - $env: $path")
}
}
}
private[this] def writeToFile(path: String, contents: String): Unit = {
import java.io.{BufferedWriter, File, FileWriter}
val bw = new BufferedWriter(new FileWriter(new File(path)))
try {
bw.write(contents)
} finally {
bw.close()
}
}
}
| flowcommerce/api-lint | src/main/scala/io/flow/proxy/Controller.scala | Scala | mit | 4,878 |
package scala.macros
//http://meta.plasm.us/posts/2013/07/12/vampire-methods-for-structural-types/
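// Rough idea: the value returned by `makeInstance` has a structural type with members `z` and `v`;
// selecting `v` triggers `selectField_impl`, which reads the literal stored in the @body
// annotation (42 here) and inlines it at the call site, avoiding reflective access on the
// structural type.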
import scala.annotation.StaticAnnotation
import scala.language.experimental.macros
import scala.reflect.macros.Context
class body(tree: Any) extends StaticAnnotation
object StructuralMacros {
def makeInstance = macro makeInstance_impl
def makeInstance_impl(c: Context) = c.universe.reify[Any] {
class Workaround {
def z: Int = 13
@body(42) def v: Int = macro StructuralMacros.selectField_impl
}
new Workaround {}
}
def selectField_impl(c: Context) = c.Expr(
c.macroApplication.symbol.annotations.filter(
_.tpe <:< c.typeOf[body]
).head.scalaArgs.head
)
}
| bdas/macros-scala | macro/src/main/scala/scala/macros/Vampire.scala | Scala | apache-2.0 | 709 |
package com.nabijaczleweli.minecrasmer.util
trait IOreDictRegisterable {
def registerOreDict(): Unit
}
| nabijaczleweli/ASMifier | src/main/scala/com/nabijaczleweli/minecrasmer/util/IOreDictRegisterable.scala | Scala | mit | 105 |
package tanukkii.akkahttp.aws
package dynamodb
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.stream.Materializer
import com.amazonaws.services.dynamodbv2.model._
import scala.concurrent.Future
trait DynamoDBClient extends AWSClient {
import AWSClientConversions._
private val mu = MarshallersAndUnmarshallers(DynamoDBServiceContext.protocolFactory)
import mu._
def listTables(request: ListTablesRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[ListTablesResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[ListTablesResult, DynamoDBService](response))(mat.executionContext)
}
def query(request: QueryRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[QueryResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[QueryResult, DynamoDBService](response))(mat.executionContext)
}
def scan(request: ScanRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[ScanResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[ScanResult, DynamoDBService](response))(mat.executionContext)
}
def updateItem(request: UpdateItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[UpdateItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[UpdateItemResult, DynamoDBService](response))(mat.executionContext)
}
def putItem(request: PutItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[PutItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[PutItemResult, DynamoDBService](response))(mat.executionContext)
}
def describeTable(request: DescribeTableRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[DescribeTableResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[DescribeTableResult, DynamoDBService](response))(mat.executionContext)
}
def createTable(request: CreateTableRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[CreateTableResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[CreateTableResult, DynamoDBService](response))(mat.executionContext)
}
def updateTable(request: UpdateTableRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[UpdateTableResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[UpdateTableResult, DynamoDBService](response))(mat.executionContext)
}
def deleteTable(request: DeleteTableRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[DeleteTableResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[DeleteTableResult, DynamoDBService](response))(mat.executionContext)
}
def getItem(request: GetItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[GetItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[GetItemResult, DynamoDBService](response))(mat.executionContext)
}
def batchWriteItem(request: BatchWriteItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[BatchWriteItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[BatchWriteItemResult, DynamoDBService](response))(mat.executionContext)
}
def batchGetItem(request: BatchGetItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[BatchGetItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[BatchGetItemResult, DynamoDBService](response))(mat.executionContext)
}
def sendDeleteItem(request: DeleteItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[DeleteItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[DeleteItemResult, DynamoDBService](response))(mat.executionContext)
}
def deleteItem(request: DeleteItemRequest)(implicit cf: ConnectionFlow[HttpRequest, HttpResponse], mat: Materializer): Future[DeleteItemResult] = {
sendRequest(request).flatMap(response => convertFromHttpResponse[DeleteItemResult, DynamoDBService](response))(mat.executionContext)
}
}
object DynamoDBClient extends DynamoDBClient | TanUkkii007/akka-http-aws | akka-http-aws-dynamodb/src/main/scala/tanukkii/akkahttp/aws/dynamodb/DynamoDBClient.scala | Scala | mit | 4,517 |
/*
*
* Copyright 2015.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jobimtext.misc
import de.tudarmstadt.lt.scalautils.FormatUtils
import org.apache.spark.rdd.RDD
/**
* Created by Steffen Remus.
*/
object JoinBySharedFeaturesCartesian {
  /**
   * Read raw TSV lines, join all entries that share a feature, and format the result as TSV.
   *
   * @param lines_in tab-separated input lines of the form (entity, feature, prob, log10prob)
   * @return tab-separated output lines of the form (e1, e2, feature, log10prob1, log10prob2)
   */
def apply(lines_in:RDD[String]):RDD[String] = {
val data_in = repr(lines_in)
val data_out = join_shared_features(data_in)
val lines_out = data_out.map({case (e1,e2,f1,l1,l2) => "%s\\t%s\\t%s\\t%s\\t%s".format(e1,e2,f1,FormatUtils.format(l1),FormatUtils.format(l2))})
return lines_out
}
def repr(lines_in:RDD[String]):RDD[(String, String, Double)] = {
return lines_in.map(_.split("\\t"))
.map({case Array(e,f,prob,log10prob) => (e, f, log10prob.toDouble)})
}
/**
   * Pair up all distinct entries that share the same feature (a filtered cartesian self-join).
   *
* @param data_in (e,f,log10prob)
* @return (e1,e2,f,log10prob1,log10prob2)
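   *
   * Example with illustrative data:
   * {{{
   *   in : ("dog","subj",-1.2), ("cat","subj",-0.7), ("dog","obj",-2.0)
   *   out: ("dog","cat","subj",-1.2,-0.7), ("cat","dog","subj",-0.7,-1.2)
   * }}}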
*/
def join_shared_features(data_in:RDD[(String, String, Double)]):RDD[(String,String, String, Double, Double)] = {
val data_out = data_in.cartesian(data_in) // less efficient than groupbykey, but parallalizable
.filter({case ((e1,f1,l1),(e2,f2,l2)) => f1 == f2 && e1 != e2})
.repartition(data_in.sparkContext.defaultParallelism)
.map({case ((e1,f1,l1),(e2,f2,l2)) => (e1,e2,f1,l1,l2)})
return data_out
}
}
| tudarmstadt-lt/JoBimTextCT | org.jobimtext.ct/src/main/scala/org/jobimtext/misc/JoinBySharedFeaturesCartesian.scala | Scala | apache-2.0 | 1,862 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
/**
* An expression that is evaluated to the first non-null input.
*
* {{{
* coalesce(1, 2) => 1
* coalesce(null, 1, 2) => 1
* coalesce(null, null, 2) => 2
* coalesce(null, null, null) => null
* }}}
*/
case class Coalesce(children: Seq[Expression]) extends Expression {
/** Coalesce is nullable if all of its children are nullable, or if it has no children. */
override def nullable: Boolean = children.forall(_.nullable)
// Coalesce is foldable if all children are foldable.
override def foldable: Boolean = children.forall(_.foldable)
override def checkInputDataTypes(): TypeCheckResult = {
if (children == Nil) {
TypeCheckResult.TypeCheckFailure("input to function coalesce cannot be empty")
} else {
TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), "function coalesce")
}
}
override def dataType: DataType = children.head.dataType
override def eval(input: InternalRow): Any = {
var result: Any = null
val childIterator = children.iterator
while (childIterator.hasNext && result == null) {
result = childIterator.next().eval(input)
}
result
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
""" +
children.map { e =>
val eval = e.gen(ctx)
s"""
if (${ev.isNull}) {
${eval.code}
if (!${eval.isNull}) {
${ev.isNull} = false;
${ev.primitive} = ${eval.primitive};
}
}
"""
}.mkString("\\n")
}
}
/**
 * Evaluates to `true` iff the child expression evaluates to NaN.
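 * For example (illustrative): `IsNaN(Literal(Double.NaN))` evaluates to `true`,
 * `IsNaN(Literal(1.0))` to `false`, and a null input also evaluates to `false`.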
*/
case class IsNaN(child: Expression) extends UnaryExpression
with Predicate with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(DoubleType, FloatType))
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
val value = child.eval(input)
if (value == null) {
false
} else {
child.dataType match {
case DoubleType => value.asInstanceOf[Double].isNaN
case FloatType => value.asInstanceOf[Float].isNaN
}
}
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = child.gen(ctx)
child.dataType match {
case DoubleType | FloatType =>
s"""
${eval.code}
boolean ${ev.isNull} = false;
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
${ev.primitive} = !${eval.isNull} && Double.isNaN(${eval.primitive});
"""
}
}
}
/**
 * An expression that evaluates to `left` iff `left` is not NaN, or to `right` otherwise.
* This Expression is useful for mapping NaN values to null.
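 * For example (illustrative): `NaNvl(Literal(1.5), Literal(2.0))` evaluates to `1.5`, while
 * `NaNvl(Literal(Double.NaN), Literal(2.0))` evaluates to `2.0`; a null `left` yields null.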
*/
case class NaNvl(left: Expression, right: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def dataType: DataType = left.dataType
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(DoubleType, FloatType), TypeCollection(DoubleType, FloatType))
override def eval(input: InternalRow): Any = {
val value = left.eval(input)
if (value == null) {
null
} else {
left.dataType match {
case DoubleType =>
if (!value.asInstanceOf[Double].isNaN) value else right.eval(input)
case FloatType =>
if (!value.asInstanceOf[Float].isNaN) value else right.eval(input)
}
}
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val leftGen = left.gen(ctx)
val rightGen = right.gen(ctx)
left.dataType match {
case DoubleType | FloatType =>
s"""
${leftGen.code}
boolean ${ev.isNull} = false;
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
if (${leftGen.isNull}) {
${ev.isNull} = true;
} else {
if (!Double.isNaN(${leftGen.primitive})) {
${ev.primitive} = ${leftGen.primitive};
} else {
${rightGen.code}
if (${rightGen.isNull}) {
${ev.isNull} = true;
} else {
${ev.primitive} = ${rightGen.primitive};
}
}
}
"""
}
}
}
/**
* An expression that is evaluated to true if the input is null.
*/
case class IsNull(child: Expression) extends UnaryExpression with Predicate {
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
child.eval(input) == null
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = child.gen(ctx)
ev.isNull = "false"
ev.primitive = eval.isNull
eval.code
}
}
/**
* An expression that is evaluated to true if the input is not null.
*/
case class IsNotNull(child: Expression) extends UnaryExpression with Predicate {
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
child.eval(input) != null
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = child.gen(ctx)
ev.isNull = "false"
ev.primitive = s"(!(${eval.isNull}))"
eval.code
}
}
/**
* A predicate that is evaluated to be true if there are at least `n` non-null and non-NaN values.
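 * For example (illustrative): with `n = 2`, children evaluating to `(1, null, Double.NaN, "a")`
 * give `true` (two non-null, non-NaN values), whereas `(null, Double.NaN)` give `false`.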
*/
case class AtLeastNNonNulls(n: Int, children: Seq[Expression]) extends Predicate {
override def nullable: Boolean = false
override def foldable: Boolean = children.forall(_.foldable)
  override def toString: String = s"AtLeastNNonNulls($n, ${children.mkString(",")})"
private[this] val childrenArray = children.toArray
override def eval(input: InternalRow): Boolean = {
var numNonNulls = 0
var i = 0
while (i < childrenArray.length && numNonNulls < n) {
val evalC = childrenArray(i).eval(input)
if (evalC != null) {
childrenArray(i).dataType match {
case DoubleType =>
if (!evalC.asInstanceOf[Double].isNaN) numNonNulls += 1
case FloatType =>
if (!evalC.asInstanceOf[Float].isNaN) numNonNulls += 1
case _ => numNonNulls += 1
}
}
i += 1
}
numNonNulls >= n
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val nonnull = ctx.freshName("nonnull")
val code = children.map { e =>
val eval = e.gen(ctx)
e.dataType match {
case DoubleType | FloatType =>
s"""
if ($nonnull < $n) {
${eval.code}
if (!${eval.isNull} && !Double.isNaN(${eval.primitive})) {
$nonnull += 1;
}
}
"""
case _ =>
s"""
if ($nonnull < $n) {
${eval.code}
if (!${eval.isNull}) {
$nonnull += 1;
}
}
"""
}
}.mkString("\\n")
s"""
int $nonnull = 0;
$code
boolean ${ev.isNull} = false;
boolean ${ev.primitive} = $nonnull >= $n;
"""
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala | Scala | apache-2.0 | 8,346 |
package com.github.mdr.mash.ns.os
import com.github.mdr.mash.completions.CompletionSpec
import com.github.mdr.mash.functions.{ BoundParams, MashFunction, Parameter, ParameterModel }
import com.github.mdr.mash.inference.TypedArguments
import com.github.mdr.mash.os.linux.LinuxFileSystem
import com.github.mdr.mash.runtime.MashList
object GlobFunction extends MashFunction("os.glob") {
private val fileSystem = LinuxFileSystem
object Params {
val Pattern = Parameter(
nameOpt = Some("pattern"),
summaryOpt = Some("Pattern to match path names against"))
}
import Params._
val params = ParameterModel(Pattern)
def call(boundParams: BoundParams): MashList = {
val pattern = boundParams.validateString(Pattern).s
MashList(fileSystem.glob(pattern).map(PathSummaryClass.asMashObject))
}
override def typeInferenceStrategy = Seq(PathSummaryClass)
override def getCompletionSpecs(argPos: Int, arguments: TypedArguments) = Seq(CompletionSpec.File)
override def summaryOpt = Some("Return files matching a glob pattern")
} | mdr/mash | src/main/scala/com/github/mdr/mash/ns/os/GlobFunction.scala | Scala | mit | 1,063 |
package model
import util.Read
object Perceptron {
def q15() = {
val dataset = Read.fromFile("/hw1_15_train.dat")
def iter(h: Hypothesis, i: Int, n: Int, no_error: Boolean): Int =
if (n > 60) n
else if (i == 0 && no_error && h.hasSeparated(dataset)) n
else {
val nextIndex = (i + 1) % dataset.length
val instance = dataset(i)
if (h.isCorrectOn(instance)) iter(h, nextIndex, n, i == 0 || no_error)
else iter(h.update(instance), nextIndex, n + 1, false)
}
iter(Hypothesis(Vector.fill(dataset.head.x.length)(0.0)), 0, 0, true)
}
}
| hsinhuang/codebase | ntumlone-002/Perceptron/src/main/scala/model/Perceptron.scala | Scala | gpl-2.0 | 602 |
package cz.kamenitxan.jakon.utils.security.oauth
import java.sql.Connection
import java.util
import com.github.scribejava.apis.GoogleApi20
import com.github.scribejava.core.builder.ServiceBuilder
import com.github.scribejava.core.model.{OAuth2AccessToken, OAuthRequest, Verb}
import com.google.gson.Gson
import cz.kamenitxan.jakon.core.configuration.{Configuration, ConfigurationValue, Settings}
import cz.kamenitxan.jakon.utils.Utils
import cz.kamenitxan.jakon.utils.Utils.StringImprovements
import spark.Request
import scala.jdk.CollectionConverters._
@Configuration
object Google extends OauthProvider {
@ConfigurationValue(name = "OAUTH.google.clientId", required = false)
var clientId: String = _
@ConfigurationValue(name = "OAUTH.google.clientSecret", required = false)
var clientSecret: String = _
private lazy val gson = new Gson()
lazy val isEnabled = Utils.nonEmpty(clientId) && Utils.nonEmpty(clientSecret)
def authInfo(req: Request, redirectTo: String) = OauthInfo("google", createAuthUrl(req, redirectTo))
lazy val service = new ServiceBuilder(clientId)
.apiSecret(clientSecret)
.defaultScope("email")
.callback(s"http://${Settings.getHostname}${
if (Settings.getPort != 80) {
s":${Settings.getPort}"
}
}/admin/login/oauth?provider=${this.getClass.getSimpleName}")
.build(GoogleApi20.instance)
def createAuthUrl(req: Request, redirectTo: String): String = {
if (!isEnabled) return ""
val secretState = setSecretState(req)
// Obtain the Authorization URL// Obtain the Authorization URL
//pass access_type=offline to get refresh token
//https://developers.google.com/identity/protocols/OAuth2WebServer#preparing-to-start-the-oauth-20-flow
val additionalParams = new java.util.HashMap[String, String]
additionalParams.put("access_type", "offline")
		//force re-issuing of the refresh token (even if the user has already been asked before)
additionalParams.put("prompt", "consent")
if (redirectTo != null) additionalParams.put(OauthProvider.REDIRECT_TO, redirectTo)
val authorizationUrl: String = service.createAuthorizationUrlBuilder.state(secretState).additionalParams(additionalParams).build
authorizationUrl
}
// TODO: endpoint discovery https://stackoverflow.com/questions/55541686/google-oauth2-userinfo-api-not-returning-users-name-data
override def handleAuthResponse(req: Request)(implicit conn: Connection): Boolean = {
val secret = req.queryParams("secretState")
val code = req.queryParams("code")
if (secret.isNullOrEmpty || code.isNullOrEmpty) {
return false
}
		val accessToken: OAuth2AccessToken = service.getAccessToken(code)
val re = new OAuthRequest(Verb.GET, "https://openidconnect.googleapis.com/v1/userinfo?email")
service.signRequest(accessToken, re)
val response = service.execute(re)
if (200 == response.getCode) {
System.out.println(response.getBody)
val authInfo = gson.fromJson(response.getBody, classOf[util.Map[String, String]]).asScala.toMap
val email = authInfo("email")
if (email.nonEmpty) {
logIn(req, email)
} else {
false
}
} else {
false
}
}
} | kamenitxan/Jakon | modules/backend/src/main/scala/cz/kamenitxan/jakon/utils/security/oauth/Google.scala | Scala | bsd-3-clause | 3,103 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.keras.{KerasBaseSpec, KerasRunner}
import com.intel.analytics.bigdl.tensor.Tensor
class HighwaySpec extends KerasBaseSpec {
"highway forward backward" should "work properly" in {
val kerasCode =
"""
|input_tensor = Input(shape=[2])
|input = np.random.uniform(0, 1, [3, 2])
|output_tensor = Highway(activation='tanh')(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val highway = Highway[Float](2, activation = Tanh[Float])
def weightConverter(in: Array[Tensor[Float]]): Array[Tensor[Float]] =
Array(in(1).t(), in(3), in(0).t(), in(2))
checkHighwayOutputAndGrad(highway, kerasCode, weightConverter)
}
"highway forward backward noBias" should "work properly" in {
val kerasCode =
"""
|input_tensor = Input(shape=[2])
|input = np.random.uniform(0, 1, [3, 2])
|output_tensor = Highway(activation='tanh', bias=None)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val highway = Highway[Float](2, activation = Tanh[Float], withBias = false)
def weightConverter(in: Array[Tensor[Float]]): Array[Tensor[Float]] =
Array(in(1).t(), in(0).t())
checkHighwayOutputAndGrad(highway, kerasCode, weightConverter)
}
"highway forward backward no activation" should "work properly" in {
val kerasCode =
"""
|input_tensor = Input(shape=[2])
|input = np.random.uniform(0, 1, [3, 2])
|output_tensor = Highway(bias=None)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val highway = Highway[Float](2, withBias = false)
def weightConverter(in: Array[Tensor[Float]]): Array[Tensor[Float]] =
Array(in(1).t(), in(0).t())
checkHighwayOutputAndGrad(highway, kerasCode, weightConverter)
}
def checkHighwayOutputAndGrad(bmodel: Graph[Float],
kerasCode: String,
weightConverter: (Array[Tensor[Float]]) => Array[Tensor[Float]],
precision: Double = 1e-5): Unit = {
ifskipTest()
val (gradInput, gradWeight, weights, input, target, output) = KerasRunner.run(kerasCode)
// Ensure they share the same weights
if (weights != null) {
bmodel.setWeightsBias(weightConverter(weights))
}
val boutput = bmodel.forward(input).toTensor[Float]
boutput.almostEqual(output, precision) should be(true)
val bgradInput = bmodel.backward(input, boutput.clone()).toTensor[Float]
bgradInput.almostEqual(gradInput, precision) should be(true)
}
"Highway serializer" should "work properly" in {
val module = Highway[Float](2, activation = Tanh[Float])
val input = Tensor[Float](3, 2).randn()
val res1 = module.forward(input.clone()).toTensor[Float].clone()
val clone = module.cloneModule()
val tmpFile = java.io.File.createTempFile("module", ".bigdl")
module.saveModule(tmpFile.getAbsolutePath, null, true)
val loaded = Module.loadModule[Float](tmpFile.getAbsolutePath)
val res2 = loaded.forward(input.clone())
val namedModule = Utils.getNamedModules[Float](clone)
val namedModule2 = Utils.getNamedModules[Float](loaded)
res1 should be(res2)
if (tmpFile.exists()) {
tmpFile.delete()
}
}
}
| qiuxin2012/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/HighwaySpec.scala | Scala | apache-2.0 | 3,960 |
import io.gatling.build.SonatypeReleasePlugin
import BuildSettings._
import Bundle._
import ConfigFiles._
import CopyLogback._
import Dependencies._
import VersionFile._
import pl.project13.scala.sbt.JmhPlugin
import sbt.Keys._
import sbt._
object GatlingBuild extends Build {
/******************/
/** Root project **/
/******************/
lazy val root = Project("gatling-parent", file("."))
.enablePlugins(SonatypeReleasePlugin)
.dependsOn(Seq(commons, core, http, jms, jdbc, redis).map(_ % "compile->compile;test->test"): _*)
.aggregate(commons, core, jdbc, redis, http, jms, charts, metrics, app, recorder, testFramework, bundle, compiler)
.settings(basicSettings: _*)
.settings(noArtifactToPublish)
.settings(docSettings(benchmarks, bundle): _*)
.settings(libraryDependencies ++= docDependencies)
/*************/
/** Modules **/
/*************/
def gatlingModule(id: String) = Project(id, file(id))
.enablePlugins(SonatypeReleasePlugin)
.settings(gatlingModuleSettings: _*)
lazy val commons = gatlingModule("gatling-commons")
.settings(libraryDependencies ++= commonsDependencies(scalaVersion.value))
lazy val core = gatlingModule("gatling-core")
.dependsOn(commons % "compile->compile;test->test")
.settings(libraryDependencies ++= coreDependencies)
.settings(generateVersionFileSettings: _*)
.settings(copyGatlingDefaults(compiler): _*)
lazy val jdbc = gatlingModule("gatling-jdbc")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= jdbcDependencies)
lazy val redis = gatlingModule("gatling-redis")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= redisDependencies)
lazy val http = gatlingModule("gatling-http")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= httpDependencies)
lazy val jms = gatlingModule("gatling-jms")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= jmsDependencies)
.settings(parallelExecution in Test := false)
lazy val charts = gatlingModule("gatling-charts")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= chartsDependencies)
.settings(excludeDummyComponentLibrary: _*)
.settings(chartTestsSettings: _*)
lazy val metrics = gatlingModule("gatling-metrics")
.dependsOn(core % "compile->compile;test->test")
.settings(libraryDependencies ++= metricsDependencies)
lazy val compiler = gatlingModule("gatling-compiler")
.settings(scalaVersion := "2.10.6")
.settings(libraryDependencies ++= compilerDependencies(scalaVersion.value))
lazy val benchmarks = gatlingModule("gatling-benchmarks")
.dependsOn(core, http)
.enablePlugins(JmhPlugin)
.settings(libraryDependencies ++= benchmarkDependencies)
lazy val app = gatlingModule("gatling-app")
.dependsOn(core, http, jms, jdbc, redis, metrics, charts)
lazy val recorder = gatlingModule("gatling-recorder")
.dependsOn(core % "compile->compile;test->test", http)
.settings(libraryDependencies ++= recorderDependencies)
lazy val testFramework = gatlingModule("gatling-test-framework")
.dependsOn(app)
.settings(libraryDependencies ++= testFrameworkDependencies)
lazy val bundle = gatlingModule("gatling-bundle")
.dependsOn(core, http)
.settings(generateConfigFiles(core): _*)
.settings(generateConfigFiles(recorder): _*)
.settings(copyLogbackXml(core): _*)
.settings(bundleSettings: _*)
.settings(noArtifactToPublish)
}
| ryez/gatling | project/GatlingBuild.scala | Scala | apache-2.0 | 3,606 |
/***
** _______
** |__ __| reqT - a requriements engineering tool
** _ __ ___ __ _ | | (c) 2011-2014, Lund University
** | __|/ _ \\ / _ || | http://reqT.org
** | | | __/| (_| || |
** |_| \\___| \\__ ||_|
** | |
** |_|
** reqT is open source, licensed under the BSD 2-clause license:
** http://opensource.org/licenses/bsd-license.php
**************************************************************************/
package reqT
/**
* reqT.NanoZap -- A minimalistic DSL for testing.
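 *
 * Usage sketch (illustrative):
 * {{{
 *   import reqT.NanoZap._
 *   test("arithmetic")(
 *     "one plus one".test(1 + 1 == 2) +
 *     "two times two".test(2 * 2 == 4))
 * }}}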
*/
trait NanoZap {
implicit class StringTest(string: String) {
def test(isOk: Boolean):String =
if (isOk) "" else s"*** TEST FAILED: $string\\n"
}
def test(name: String)(report: String): Boolean = {
print(s"NanoZap test($name) ... ")
if (report.isEmpty) println("Ok!") else println(s"ZAPPED!\\n$report")
report == ""
}
}
object NanoZap extends NanoZap
| reqT/NanoZap | src/main/scala/NanoZap.scala | Scala | bsd-2-clause | 1,004 |
import io.github.voidcontext.bricksetclient.api._
import io.github.voidcontext.bricksetclient.client.BricksetClient
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.util.{Success, Failure}
object OwnedSets {
val apikey = "apikey"
val username = "username"
val password = "password"
def main(args: Array[String]) {
val client = BricksetClient(apikey)
val apiError = new Exception("API error")
val completedFuture: Future[Unit] = client.login(username, password) flatMap {
case Success(hash) => client.getOwnedSets(hash)
case Failure(err) => Future.failed(err)
} map { sets =>
sets.foreach { set => println(set.name.get) }
} recover {
case err: Exception => println(err.getMessage)
}
Await.ready(completedFuture, 30.seconds)
}
}
| voidcontext/scala-brickset-client | examples/ownedsets.scala | Scala | mit | 850 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.marklogic.fs
import slamdata.Predef._
import quasar.Data
import quasar.contrib.pathy._
import quasar.contrib.scalaz.eitherT._
import quasar.fs._
import pathy.Path._
import scalaz.{Node => _, _}, Scalaz._
import scalaz.concurrent.Task
final class FormatAwareFileSystemSpec extends MultiFormatFileSystemTest {
def dirNode(name: String): Node = Node.ImplicitDir(DirName(name))
def fileNode(name: String): Node = Node.Data(FileName(name))
val beAFormatConflictError = beLike[Throwable] {
case e: Exception => e.getMessage must contain("different format exists")
}
def multiFormatFileSystemShould(js: BackendEffect ~> Task, xml: BackendEffect ~> Task) = {
"FileSystem operations should respect the mount format" >> {
"creating a file that already exists in a different format should fail" >> {
val f: AFile = rootDir </> dir("createconflict") </> file("thefile")
val data = Vector(Data._int(1), Data._int(2))
val save = write.saveThese(f, data)
val saves = (runFsE(js)(save) *> runFsE(xml)(save)).run.attempt
val jsData = read.scanAll(f).translate(runFsE(js)).runLog.run
(saves |@| jsData)((saveResult, jsd) =>
(saveResult.toEither must beLeft(beAFormatConflictError)) and
(jsd.toEither must beRight(containTheSameElementsAs(data)))
).unsafePerformSync
}
"ls should not show files in other formats" >> {
val cdir: ADir = rootDir </> dir("lscommon")
val jsFile = cdir </> file("jsfile")
val xmlFile = cdir </> file("xmlfile")
val jsObj: Data = Data.Obj("js" -> Data.True)
val xmlObj: Data = Data.Obj("xml" -> Data.True)
val saveJs = write.saveThese(jsFile, Vector(jsObj))
val saveXml = write.saveThese(xmlFile, Vector(xmlObj))
(runFsE(xml)(saveXml) *> runFsE(js)(saveJs *> query.ls(cdir)))
.run.map(_.toEither must beRight(contain(exactly(fileNode("jsfile")))))
.unsafePerformSync
}
"ls should exclude dirs that only contain files in other formats" >> {
val cdir: ADir = rootDir </> dir("excludedirs")
val jsFile = cdir </> file("jsfile")
val xmlFile = cdir </> dir("xmlfiles") </> file("xmlfile")
val jsObj: Data = Data.Obj("js" -> Data.True)
val xmlObj: Data = Data.Obj("xml" -> Data.True)
val saveJs = write.saveThese(jsFile, Vector(jsObj))
val saveXml = write.saveThese(xmlFile, Vector(xmlObj))
(runFsE(xml)(saveXml) *> runFsE(js)(saveJs *> query.ls(cdir)))
.run.map(_.toEither must beRight(contain(exactly(fileNode("jsfile")))))
.unsafePerformSync
}
"ls should include dirs contain descendants in current format" >> {
val cdir: ADir = rootDir </> dir("includedirs")
val jsFile = cdir </> dir("jsdir1") </> dir("jsdir2") </> file("jsfile")
val xmlFile = cdir </> dir("xmlfiles") </> file("xmlfile")
val jsObj: Data = Data.Obj("js" -> Data.True)
val xmlObj: Data = Data.Obj("xml" -> Data.True)
val saveJs = write.saveThese(jsFile, Vector(jsObj))
val saveXml = write.saveThese(xmlFile, Vector(xmlObj))
(runFsE(xml)(saveXml) *> runFsE(js)(saveJs *> query.ls(cdir)))
.run.map(_.toEither must beRight(contain(exactly(dirNode("jsdir1")))))
.unsafePerformSync
}
"move file should fail when dst exists in another format" >> {
val cdir: ADir = rootDir </> dir("movefails")
val jsFile = cdir </> file("jsfile")
val xmlFile = cdir </> file("xmlfile")
val jsObj: Data = Data.Obj("js" -> Data.True)
val xmlObj: Data = Data.Obj("xml" -> Data.True)
val saveJs = write.saveThese(jsFile, Vector(jsObj))
val saveXml = write.saveThese(xmlFile, Vector(xmlObj))
val moveJs = manage.moveFile(jsFile, xmlFile, MoveSemantics.Overwrite)
val attemptMove = (
runFsE(xml)(saveXml) *>
runFsE(js)(saveJs *> moveJs)
).run.attempt map (_.toEither must beLeft(beAFormatConflictError))
val checkExists = (
runFs(xml)(query.fileExists(xmlFile)) |@|
runFs(js)(query.fileExists(jsFile))
)((xmlExists, jsExists) => (xmlExists && jsExists) must beTrue)
(attemptMove |@| checkExists)(_ and _).unsafePerformSync
}
"move dir should move into shared physical dir with other format" >> {
val cdir: ADir = rootDir </> dir("moveshareddir")
val jsDir = cdir </> dir("jsdir")
val jsA = jsDir </> file("A")
val jsB = jsDir </> file("B")
val xmlDir = cdir </> dir("xmldir")
val xmlC = xmlDir </> file("C")
val xmlD = xmlDir </> file("D")
val data = Vector(Data._int(42))
val writeTpl = (write.saveThese _).tupled
val saveJs = List(jsA, jsB).strengthR(data).traverse(writeTpl)
val saveXml = List(xmlC, xmlD).strengthR(data).traverse(writeTpl)
val attemptMove =
runFsE(xml)(saveXml) *>
runFsE( js)(saveJs *> manage.moveDir(jsDir, xmlDir, MoveSemantics.FailIfExists))
val checkSuccess = (
runFsE( js)(query.ls( jsDir)).run |@|
runFsE( js)(query.ls(xmlDir)).run |@|
runFsE(xml)(query.ls(xmlDir)).run
) { (lsjs, jslsxml, lsxml) =>
(lsjs.toEither must beLeft) and
(jslsxml.toEither must beRight(contain(exactly(fileNode("A"), fileNode("B"))))) and
(lsxml.toEither must beRight(contain(exactly(fileNode("C"), fileNode("D")))))
}
(attemptMove.run *> checkSuccess).unsafePerformSync
}
"move dir should fail when any dst file exists in another format" >> {
val cdir: ADir = rootDir </> dir("movedirfails")
val jsDir = cdir </> dir("jsdir")
val jsA = jsDir </> file("A")
val jsB = jsDir </> file("B")
val xmlDir = cdir </> dir("xmldir")
val xmlB = xmlDir </> file("B")
val xmlC = xmlDir </> file("C")
val data = Vector(Data._int(42))
val writeTpl = (write.saveThese _).tupled
val saveJs = List(jsA, jsB).strengthR(data).traverse(writeTpl)
val saveXml = List(xmlB, xmlC).strengthR(data).traverse(writeTpl)
val attemptMove = (
runFsE(xml)(saveXml) *>
runFsE(js)(saveJs *> manage.moveDir(jsDir, xmlDir, MoveSemantics.Overwrite))
).run.attempt map (_.toEither must beLeft(beAFormatConflictError))
val ensureNothingChanged = (
runFsE( js)(query.ls( jsDir)).run |@|
runFsE(xml)(query.ls(xmlDir)).run
) { (lsjs, lsxml) =>
(lsjs.toEither must beRight(contain(exactly(fileNode("A"), fileNode("B"))))) and
(lsxml.toEither must beRight(contain(exactly(fileNode("B"), fileNode("C")))))
}
(attemptMove |@| ensureNothingChanged)(_ and _).unsafePerformSync
}
"move dir should not affect files in other formats" >> {
val cdir: ADir = rootDir </> dir("movefromshared")
val srcDir = cdir </> dir("srcdir")
val jsA = srcDir </> file("A")
val jsB = srcDir </> file("B")
val xmlC = srcDir </> file("C")
val xmlD = srcDir </> file("D")
val dstDir = cdir </> dir("dstdir")
val data = Vector(Data._int(42))
val writeTpl = (write.saveThese _).tupled
val saveJs = List(jsA, jsB).strengthR(data).traverse(writeTpl)
val saveXml = List(xmlC, xmlD).strengthR(data).traverse(writeTpl)
val attemptMove =
runFsE(xml)(saveXml) *>
runFsE( js)(saveJs *> manage.moveDir(srcDir, dstDir, MoveSemantics.FailIfExists))
val checkSuccess = (
runFsE( js)(query.ls(srcDir)).run |@|
runFsE( js)(query.ls(dstDir)).run |@|
runFsE(xml)(query.ls(srcDir)).run
) { (jssrc, jsdst, xmlsrc) =>
(jssrc.toEither must beLeft) and
(jsdst.toEither must beRight(contain(exactly(fileNode("A"), fileNode("B"))))) and
(xmlsrc.toEither must beRight(contain(exactly(fileNode("C"), fileNode("D")))))
}
(attemptMove.run *> checkSuccess).unsafePerformSync
}
"delete dir should not affect files in other formats" >> {
val cdir: ADir = rootDir </> dir("deleteshared")
val jsA = cdir </> file("A")
val jsB = cdir </> file("B")
val xmlC = cdir </> file("C")
val xmlD = cdir </> file("D")
val data = Vector(Data._int(42))
val writeTpl = (write.saveThese _).tupled
val saveJs = List(jsA, jsB).strengthR(data).traverse(writeTpl)
val saveXml = List(xmlC, xmlD).strengthR(data).traverse(writeTpl)
val attemptDelete =
runFsE(xml)(saveXml) *>
runFsE( js)(saveJs *> manage.delete(cdir))
val checkSuccess = (
runFsE( js)(query.ls(cdir)).run |@|
runFsE(xml)(query.ls(cdir)).run
) { (jsls, xmlls) =>
(jsls.toEither must beLeft) and
(xmlls.toEither must beRight(contain(exactly(fileNode("C"), fileNode("D")))))
}
(attemptDelete.run *> checkSuccess).unsafePerformSync
}
"delete dir should not affect subdirs that still contain files in other formats" >> {
val cdir: ADir = rootDir </> dir("deletesharedsub")
val jsA = cdir </> file("A")
val jsB = cdir </> file("B")
val xmlsub = cdir </> dir("xmlsub")
val xmlC = xmlsub </> file("C")
val xmlD = xmlsub </> file("D")
val data = Vector(Data._int(42))
val writeTpl = (write.saveThese _).tupled
val saveJs = List(jsA, jsB).strengthR(data).traverse(writeTpl)
val saveXml = List(xmlC, xmlD).strengthR(data).traverse(writeTpl)
val attemptDeleteParent =
runFsE(xml)(saveXml) *>
runFsE( js)(saveJs *> manage.delete(cdir))
val checkSuccess = (
runFsE( js)(query.ls(cdir)).run |@|
runFsE(xml)(query.ls(cdir)).run
) { (jsls, xmlls) =>
(jsls.toEither must beLeft) and
(xmlls.toEither must beRight(contain(exactly(dirNode("xmlsub")))))
}
(attemptDeleteParent.run *> checkSuccess).unsafePerformSync
}
}
}
}
| jedesah/Quasar | marklogicIt/src/test/scala/quasar/physical/marklogic/fs/FormatAwareFileSystemSpec.scala | Scala | apache-2.0 | 11,280 |
package io.akkawarsjawa.api
import spray.httpx.Json4sSupport
import org.json4s._
object Json4sProtocol extends Json4sSupport {
implicit def json4sFormats: Formats = DefaultFormats
}
| PiotrTrzpil/warsjawa-akka | src/main/scala/io/akkawarsjawa/api/Json4sProtocol.scala | Scala | apache-2.0 | 188 |
package org.jetbrains.plugins.scala.lang.macros
import org.jetbrains.plugins.scala.DependencyManagerBase.RichStr
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
import org.jetbrains.plugins.scala.debugger.{ScalaVersion, Scala_2_12, Scala_2_13}
import org.jetbrains.plugins.scala.lang.typeConformance.TypeConformanceTestBase
/**
* Nikolay.Tropin
* 29-Jan-18
*/
class ShapelessConformanceTest_2_12 extends ShapelessConformanceTestBase()(Scala_2_12)
class ShapelessConformanceTest_2_13 extends ShapelessConformanceTestBase()(Scala_2_13)
abstract class ShapelessConformanceTestBase()(override implicit val version: ScalaVersion) extends TypeConformanceTestBase {
override protected def additionalLibraries(): Seq[LibraryLoader] =
IvyManagedLoader("com.chuusai" %% "shapeless" % "2.3.3") :: Nil
def testWitnessSelectDynamic(): Unit = doTest(
s"""
|object Test {
| type `"foo"` = shapeless.Witness.`"foo"`.T
|}
|val foo: Test.`"foo"` = "foo"
|//True
""".stripMargin
)
def testWitnessValSelectDynamic(): Unit = doTest(
s"""
|object Test {
| val W = shapeless.Witness
| type `"foo"` = W.`"foo"`.T
|}
|val foo: Test.`"foo"` = "foo"
|//True
""".stripMargin
)
def testWitnessSelectDynamicWrongLiteral(): Unit = doTest(
s"""
|object Test {
| type `"foo"` = shapeless.Witness.`"foo"`.T
|}
|val foo: Test.`"foo"` = "bar"
|//False
""".stripMargin
)
def testWitnessValSelectDynamicWrongLiteral(): Unit = doTest(
s"""
|object Test {
| val W = shapeless.Witness
| type `"foo"` = W.`"foo"`.T
|}
|val foo: Test.`"foo"` = "bar"
|//False
""".stripMargin
)
def testWitnessNegativeLiteral(): Unit = doTest(
"""
|object Test {
| val W = shapeless.Witness
| type MinusOne = W.`-1`.T
|}
|val minusOne: Test.MinusOne = -1
|//True
""".stripMargin
)
def testWitnessInfixExpression(): Unit = doTest(
"""
|object Test {
| val W = shapeless.Witness
| type Zero = W.`1 - 1`.T
|}
|val z: Test.Zero = 0
|//True
""".stripMargin
)
def testWitnessInfixExpressionWrong(): Unit = doTest(
"""
|object Test {
| val W = shapeless.Witness
| type Zero = W.`1 - 1`.T
|}
|val z: Test.Zero = 1
|//False
""".stripMargin
)
} | jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/macros/ShapelessConformanceTest.scala | Scala | apache-2.0 | 2,499 |
package com.sksamuel.avro4s.decoders
import com.sksamuel.avro4s.{Avro4sDecodingException, Decoder}
import org.apache.avro.Schema
import java.nio.ByteBuffer
trait ByteDecoders:
given Decoder[Array[Byte]] = ArrayByteDecoder
given Decoder[ByteBuffer] = ByteBufferDecoder
given Decoder[List[Byte]] = ArrayByteDecoder.map(_.toList)
given Decoder[Seq[Byte]] = ArrayByteDecoder.map(_.toList)
given Decoder[Vector[Byte]] = ArrayByteDecoder.map(_.toVector)
/**
* A [[Decoder]] for byte arrays that accepts any compatible type regardless of schema.
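 * For example (illustrative): `ArrayByteDecoder.decode(schema)` maps a `ByteBuffer`, a raw
 * `Array[Byte]`, or a `GenericFixed` value to the underlying `Array[Byte]`.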
*/
object ArrayByteDecoder extends Decoder[Array[Byte]] :
override def decode(schema: Schema): Any => Array[Byte] = { value =>
value match {
case buffer: ByteBuffer => buffer.array
case array: Array[Byte] => array
case fixed: org.apache.avro.generic.GenericFixed => fixed.bytes
case _ => throw new Avro4sDecodingException(s"ArrayByteDecoder cannot decode '$value'", value)
}
}
object ByteBufferDecoder extends Decoder[ByteBuffer] :
override def decode(schema: Schema): Any => ByteBuffer = { value =>
value match {
case buffer: ByteBuffer => buffer
case array: Array[Byte] => ByteBuffer.wrap(array)
case fixed: org.apache.avro.generic.GenericFixed => ByteBuffer.wrap(fixed.bytes)
case _ => throw new Avro4sDecodingException(s"ByteBufferDecoder cannot decode '$value'", value)
}
}
/**
 * A strict [[Decoder]] for byte arrays that only works if the schema is FIXED.
*/
object FixedByteArrayDecoder extends Decoder[Array[Byte]] :
override def decode(schema: Schema): Any => Array[Byte] =
require(schema.getType == Schema.Type.FIXED, {
s"Fixed byte array decoder only supports schema type FIXED, got $schema"
})
{ value =>
value match {
case fixed: org.apache.avro.generic.GenericFixed => fixed.bytes
case _ => throw new Avro4sDecodingException(s"FixedByteArrayDecoder cannot decode '$value'", value)
}
} | sksamuel/avro4s | avro4s-core/src/main/scala/com/sksamuel/avro4s/decoders/bytes.scala | Scala | apache-2.0 | 1,984 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geohash
import java.util.Iterator
import scala.collection.mutable.TreeSet
/**
* This iterator traverses a bounding box returning the GeoHashes inside the box
* in "z-order" starting with the lower left GeoHash and finishing with the upper
* right GeoHash.
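 *
 * Usage sketch (illustrative):
 * {{{
 *   val it = new BoundingBoxGeoHashIterator(twoGh)
 *   while (it.hasNext()) println(it.next())  // lower-left first, upper-right last
 * }}}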
*/
class BoundingBoxGeoHashIterator(twoGh: TwoGeoHashBoundingBox) extends Iterator[GeoHash] {
val (latSteps, lonSteps) = GeoHash.getLatitudeLongitudeSpanCount(twoGh.ll, twoGh.ur, twoGh.prec)
val Array(endLatIndex, endLonIndex) = GeoHash.gridIndicesForLatLong(twoGh.ur)
// This is the number of GeoHashes that our iterator will return.
val ns = latSteps*lonSteps
// We maintain a queue of possible next available GeoHashes.
val queue = TreeSet[GeoHash](twoGh.ll)
var nextGh = twoGh.ll
def hasNext(): Boolean = queue.nonEmpty
def next(): GeoHash = {
if (hasNext) {
// The next GeoHash is the least of the candidates in the queue.
nextGh = queue.head
// Standing at the "next" GeoHash, we need to compute the GeoHash to right and the one above.
val latIndex = GeoHash.gridIndexForLatitude(nextGh)
val lonIndex = GeoHash.gridIndexForLongitude(nextGh)
if (lonIndex + 1 <= endLonIndex) {
val nextLonGh = GeoHash.composeGeoHashFromBitIndicesAndPrec(
latIndex, lonIndex + 1, nextGh.prec)
if (twoGh.bbox.covers(nextLonGh.getPoint))
queue add nextLonGh
}
if (latIndex + 1 <= endLatIndex) {
val nextLatGh = GeoHash.composeGeoHashFromBitIndicesAndPrec(
latIndex + 1, lonIndex, nextGh.prec)
// If the calculated GeoHashes are still with the box, we add them to the queue.
if (twoGh.bbox.covers(nextLatGh.getPoint))
queue add nextLatGh
}
queue.remove(nextGh)
nextGh
}
else throw new NoSuchElementException("No more geohashes available in iterator")
}
def remove = throw new UnsupportedOperationException("Remove operation not supported")
} | jahhulbert-ccri/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geohash/BoundingBoxGeoHashIterator.scala | Scala | apache-2.0 | 2,490 |
package org.http4s
import org.http4s.headers._
class HeadersSpec extends Http4sSpec {
val clength = `Content-Length`.unsafeFromLong(10)
val raw = Header.Raw("raw-header".ci, "Raw value")
val base = Headers(clength.toRaw, raw)
"Headers" should {
"Not find a header that isn't there" in {
base.get(`Content-Base`) should beNone
}
"Find an existing header and return its parsed form" in {
base.get(`Content-Length`) should beSome(clength)
base.get("raw-header".ci) should beSome(raw)
}
"Replaces headers" in {
val newlen = `Content-Length`.zero
base.put(newlen).get(newlen.key) should beSome(newlen)
base.put(newlen.toRaw).get(newlen.key) should beSome(newlen)
}
"also find headers created raw" in {
val headers = Headers(
org.http4s.headers.`Cookie`(org.http4s.Cookie("foo", "bar")),
Header("Cookie", org.http4s.Cookie("baz", "quux").toString)
)
headers.get(org.http4s.headers.Cookie).map(_.values.length) must beSome(2)
}
"Find the headers with DefaultHeaderKey keys" in {
val headers = Headers(
`Set-Cookie`(org.http4s.Cookie("foo", "bar")),
Header("Accept-Patch", ""),
Header("Access-Control-Allow-Credentials", "")
)
headers.get(`Accept-Patch`).map(_.value) must beSome("")
}
"Remove duplicate headers which are not of type Recurring on concatenation (++)" in {
val hs = Headers(clength) ++ Headers(clength)
hs.toList.length must_== 1
hs.head must_== clength
}
"Allow multiple Set-Cookie headers" in {
val h1 = `Set-Cookie`(org.http4s.Cookie("foo1", "bar1")).toRaw
val h2 = `Set-Cookie`(org.http4s.Cookie("foo2", "bar2")).toRaw
val hs = Headers(clength) ++ Headers(h1) ++ Headers(h2)
hs.count(_.parsed match { case `Set-Cookie`(_) => true; case _ => false }) must_== 2
hs.exists(_ == clength) must_== true
}
"Work with Raw headers (++)" in {
val foo = ContentCoding.unsafeFromString("foo")
val bar = ContentCoding.unsafeFromString("bar")
val h1 = `Accept-Encoding`(foo).toRaw
val h2 = `Accept-Encoding`(bar).toRaw
val hs = Headers(clength.toRaw) ++ Headers(h1) ++ Headers(h2)
hs.get(`Accept-Encoding`) must beSome(`Accept-Encoding`(foo, bar))
hs.exists(_ == clength) must_== true
}
"Avoid making copies if there are duplicate collections" in {
base ++ Headers.empty eq base must_== true
Headers.empty ++ base eq base must_== true
}
"Preserve original headers when processing" in {
val rawAuth = Header("Authorization", "test this")
// Mapping to strings because Header equality is based on the *parsed* version
(Headers(rawAuth) ++ base).map(_.toString) must contain(===(rawAuth.toString))
}
"hash the same when constructed with the same contents" in {
val h1 = Headers(Header("Test-Header", "Value"))
val h2 = Headers(Header("Test-Header", "Value"))
val h3 = Headers(List(Header("Test-Header", "Value"), Header("TestHeader", "other value")))
val h4 = Headers(List(Header("TestHeader", "other value"), Header("Test-Header", "Value")))
val h5 = Headers(List(Header("Test-Header", "Value"), Header("TestHeader", "other value")))
h1.hashCode() must_== h2.hashCode()
h1.equals(h2) must_== true
h2.equals(h1) must_== true
h1.equals(h3) must_== false
h3.equals(h4) must_== false
h3.equals(h5) must_== true
}
}
}
| reactormonk/http4s | tests/src/test/scala/org/http4s/HeadersSpec.scala | Scala | apache-2.0 | 3,520 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.linker.standard
import org.scalajs.linker.interface._
/** Common configuration given to all phases of the linker. */
final class CommonPhaseConfig private (
/** Core specification. */
val coreSpec: CoreSpec,
/** Whether things that can be parallelized should be parallelized.
* On the JavaScript platform, this setting is typically ignored.
*/
val parallel: Boolean,
/** Whether the linker runs in batch mode.
*
* In batch mode, the linker phase can throw away intermediate state that
* is otherwise maintained for incremental runs.
*
* This setting is only a hint. A linker phase may ignore it. This applies
* in both directions: a phase not supporting incrementality can ignore
* `batchMode = false`, and a contrario, a phase mainly designed for
* incremental runs may ignore `batchMode = true`.
*/
val batchMode: Boolean
) {
private def this() = {
this(
coreSpec = CoreSpec.Defaults,
parallel = true,
batchMode = false)
}
}
private[linker] object CommonPhaseConfig {
private[linker] def apply(): CommonPhaseConfig = new CommonPhaseConfig()
private[linker] def fromStandardConfig(config: StandardConfig): CommonPhaseConfig = {
val coreSpec = CoreSpec(config.semantics, config.moduleKind, config.esFeatures)
new CommonPhaseConfig(coreSpec, config.parallel, config.batchMode)
}
}
| scala-js/scala-js | linker/shared/src/main/scala/org/scalajs/linker/standard/CommonPhaseConfig.scala | Scala | apache-2.0 | 1,710 |
import org.specs._
import com.redis._
import com.redis.operations._
import org.specs.mock.Mockito
import org.mockito.Mock._
import org.mockito.Mockito._
import org.mockito.Mockito.doNothing
class RedisTestClient(val connection: Connection) extends Operations with ListOperations with SetOperations with NodeOperations with KeySpaceOperations with SortOperations {
var db: Int = 0
def getConnection(key: String): Connection = connection
} | baroquebobcat/pubsubhubbub-rb | vendor/redis-1.02/client-libraries/scala/src/test/scala/com/redis/helpers/RedisClientTestHelper.scala | Scala | apache-2.0 | 443 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.launcher
/**
* This class makes SparkSubmitOptionParser visible for Spark code outside of the `launcher`
* package, since Java doesn't have a feature similar to `private[spark]`, and we don't want
* that class to be public.
 * This class makes SparkSubmitOptionParser visible to Spark code outside of the
 * `launcher` package, since Java doesn't have a feature similar to `private[spark]`
 * and we don't want that class to be public.
*/
private[spark] abstract class SparkSubmitArgumentsParser extends SparkSubmitOptionParser
| tophua/spark1.52 | core/src/main/scala/org/apache/spark/launcher/SparkSubmitArgumentsParser.scala | Scala | apache-2.0 | 1,323 |
/*
* Copyright 2016-2018 SN127.fi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fi.sn127.tackler.core
import cats.implicits._
import fi.sn127.tackler.api.Metadata
import fi.sn127.tackler.model._
class Balance(val title: String,
val bal: Seq[BalanceTreeNode],
val deltas: Map[Option[Commodity], BigDecimal],
val metadata: Option[Metadata]) {
def isEmpty: Boolean = bal.isEmpty
}
object Balance {
/**
* Recursive get balance tree nodes, starting from "me"
*
* @param me is name of root account for this sub-tree
* @param accSums list of all account sums
* @return list of balance tree nodes
*/
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
protected def getBalanceTreeNodes(
me: (AccountTreeNode, BigDecimal),
accSums: Seq[(AccountTreeNode, BigDecimal)])
: Seq[BalanceTreeNode] = {
val (myAccTN, mySum) = me
    // find my children
val childs = accSums.filter({ case (atn, _) => myAccTN.isParentOf(atn)})
    // calculate the balance tree nodes of my children
val childsBalanceTrees = childs.flatMap(c =>
getBalanceTreeNodes(c, accSums)
)
// calculate top sum of my children's balance trees
// (as it is needed for my own balance tree node)
val myChildsSum = childsBalanceTrees
.filter(btn => btn.acctn.parent === myAccTN.account)
.map(btn => btn.subAccTreeSum)
.sum
val myBTN = BalanceTreeNode(myAccTN, myChildsSum + mySum, mySum)
List(myBTN) ++ childsBalanceTrees
}
/**
   * Bubble up from the leaves to the root, and generate a new entry with a zero sum
   * for any missing (gap) AccountTreeNode.
*
* @param myAccTNSum starting AccTNSum entry
* @param accSums current incomplete (in sense of Chart of Account) account sums
* @return for this branch (from leaf to root) new set of AccTNSums without gaps
*/
@SuppressWarnings(Array(
"org.wartremover.warts.Recursion",
"org.wartremover.warts.TraversableOps"))
protected def bubbleUpAccTN(
myAccTNSum: (AccountTreeNode, BigDecimal),
accSums: Seq[(AccountTreeNode, BigDecimal)])
: Seq[(AccountTreeNode, BigDecimal)] = {
val myAccTN = myAccTNSum._1
if (myAccTN.depth === 1) {
// we are on top, so either "I" exist already
// or I has been created by my child;
// end of recursion
List(myAccTNSum)
} else {
// Not on top => find my parent
val parent = accSums.filter({ case (atn, _) => atn.isParentOf(myAccTN) })
assert(parent.isEmpty || parent.length === 1)
if (parent.isEmpty) {
if (myAccTN.depth > 2) {
val par = myAccTN.parent.substring(0, myAccTN.parent.lastIndexOf(":"))
val account = myAccTN.parent
val name = myAccTN.parent.split(":").last
val newParent = AccountTreeNode(myAccTN.depth - 1, myAccTN.root, par, account, name, myAccTN.commodity)
bubbleUpAccTN((newParent, BigDecimal(0)), accSums) ++ List(myAccTNSum)
} else {
// I am depth 2 and I don't have parent, => let's create root account
// end of recursion
// todo Chart of Accounts...
val par = ""
val account = myAccTN.parent
val name = myAccTN.parent
val newParent = AccountTreeNode(myAccTN.depth - 1, myAccTN.root, par, account, name, myAccTN.commodity)
List((newParent, BigDecimal(0)), myAccTNSum)
}
} else {
// my parent exists, just bubble up together
bubbleUpAccTN(parent.head, accSums) ++ List(myAccTNSum)
}
}
}
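  // Illustrative sketch, assuming AccountTreeNode takes (depth, root, parent, account, name, commodity)
  // as in the constructor calls above: if accSums held only the leaf
  //   (AccountTreeNode(3, "Expenses", "Expenses:Food", "Expenses:Food:Drinks", "Drinks", None), BigDecimal(10))
  // then bubbleUpAccTN(leaf, accSums) would return zero-sum pairs for "Expenses" and
  // "Expenses:Food" followed by the original leaf, so the Chart of Accounts has no gaps.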
@SuppressWarnings(Array("org.wartremover.warts.TraversableOps"))
protected def balance(txns: Txns): Seq[BalanceTreeNode] = {
// Calculate sum of postings for each account,
// resulting size of this set is "small"
// e.g. max size is size of Chart of Accounts
// TODO: AccountTreeNode: provide default groupBy machinery
val accountSums: Seq[(AccountTreeNode, BigDecimal)] = txns
.flatMap(txn => txn.posts)
.groupBy(p => AccountTreeNode.groupBy(p.acctn))
.map((kv: (String, Seq[Posting])) => {
val post = kv._2.head
val accSum = kv._2.map(_.amount).sum
(post.acctn, accSum)
}).toSeq
// From every account bubble up and insert missing parent AccTNs.
// This will generate duplicates, because we are arriving from different branches
// to the same fork in trunk.
    // (We are using the old, incomplete set of AccTNSums, not the new, complete set
    // which will be the result of this function, so the same fork will be "missing"
    // multiple times.)
val completeCoASumTree = accountSums.flatMap({ acc =>
bubbleUpAccTN(acc, accountSums)
}).distinct
// Get all root accounts
val roots: Seq[(AccountTreeNode, BigDecimal)] =
completeCoASumTree.filter({case (acctn, _) => acctn.depth === 1})
// Start from root's and get all subtree BalanceTreeNodes
val bal = roots.flatMap(rootAccSum => {
getBalanceTreeNodes(rootAccSum, completeCoASumTree)
})
bal.sorted(OrderByPost)
}
def apply(title: String, txnData: TxnData, accounts: Filtering[BalanceTreeNode]): Balance = {
val bal = balance(txnData.txns)
val fbal = bal.filter(accounts.predicate)
if (fbal.nonEmpty) {
val deltas: Map[Option[Commodity], BigDecimal] = fbal
.groupBy(_.acctn.commStr)
.map({ case (c, bs) =>
if (c === "") {
(None, bs.map(_.accountSum).sum)
} else {
(Some(new Commodity(c)), bs.map(_.accountSum).sum)
}
})
new Balance(title, fbal, deltas, txnData.metadata)
} else {
new Balance(title, Seq.empty[BalanceTreeNode],
Map.empty[Option[Commodity], BigDecimal], None)
}
}
}
| jaa127/tackler | core/src/main/scala/fi/sn127/tackler/core/Balance.scala | Scala | apache-2.0 | 6,305 |
package com.seanshubin.scala.training.core
trait QueryParser {
def parse(queryString: String): Query
}
| SeanShubin/scala-training | core/src/main/scala/com/seanshubin/scala/training/core/QueryParser.scala | Scala | unlicense | 106 |
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mikko Peltonen, Stuart Roebuck, Mark Harrah
*/
package sbt
import BasicCommandStrings.ClearOnFailure
import State.FailureWall
import annotation.tailrec
import java.io.File
import Types.const
trait Watched {
  /** The files watched when an action is run with a preceding ~ */
def watchPaths(s: State): Seq[File] = Nil
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
/**
* The time in milliseconds between checking for changes. The actual time between the last change made to a file and the
* execution time is between `pollInterval` and `pollInterval*2`.
*/
def pollInterval: Int = Watched.PollDelayMillis
/** The message to show when triggered execution waits for sources to change.*/
def watchingMessage(s: WatchState): String = Watched.defaultWatchingMessage(s)
/** The message to show before an action is run. */
def triggeredMessage(s: WatchState): String = Watched.defaultTriggeredMessage(s)
}
object Watched {
val defaultWatchingMessage: WatchState => String = _.count + ". Waiting for source changes... (press enter to interrupt)"
val defaultTriggeredMessage: WatchState => String = const("")
val clearWhenTriggered: WatchState => String = const(clearScreen)
def clearScreen: String = "\\033[2J\\033[0;0H"
private[this] class AWatched extends Watched
def multi(base: Watched, paths: Seq[Watched]): Watched =
new AWatched {
override def watchPaths(s: State) = (base.watchPaths(s) /: paths)(_ ++ _.watchPaths(s))
override def terminateWatch(key: Int): Boolean = base.terminateWatch(key)
override val pollInterval = (base +: paths).map(_.pollInterval).min
override def watchingMessage(s: WatchState) = base.watchingMessage(s)
override def triggeredMessage(s: WatchState) = base.triggeredMessage(s)
}
def empty: Watched = new AWatched
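  // Hypothetical example: a Watched instance that polls more often and clears the screen
  // on each trigger; the concrete values are arbitrary and only show which members are
  // meant to be overridden.
  private lazy val exampleWatched: Watched = new AWatched {
    override def pollInterval: Int = 250
    override def triggeredMessage(s: WatchState): String = clearScreen
  }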
val PollDelayMillis = 500
def isEnter(key: Int): Boolean = key == 10 || key == 13
def printIfDefined(msg: String) = if (!msg.isEmpty) System.out.println(msg)
def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State =
{
@tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
val sourcesFinder = PathFinder { watched watchPaths s }
val watchState = s get ContinuousState getOrElse WatchState.empty
if (watchState.count > 0)
printIfDefined(watched watchingMessage watchState)
val (triggered, newWatchState, newState) =
try {
val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate)
(triggered, newWatchState, s)
} catch {
case e: Exception =>
val log = s.log
log.error("Error occurred obtaining files to watch. Terminating continuous execution...")
State.handleException(e, s, log)
(false, watchState, s.fail)
}
if (triggered) {
printIfDefined(watched triggeredMessage newWatchState)
(ClearOnFailure :: next :: FailureWall :: repeat :: s).put(ContinuousState, newWatchState)
} else {
while (System.in.available() > 0) System.in.read()
s.put(ContinuousState, WatchState.empty)
}
}
val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
val Configuration = AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")
} | jasonchaffee/sbt | main/command/src/main/scala/sbt/Watched.scala | Scala | bsd-3-clause | 3,578 |
package scorex.network.message
import java.net.{InetAddress, InetSocketAddress}
import java.util
import com.google.common.primitives.{Bytes, Ints}
import scorex.block.Block
import scorex.consensus.ConsensusModule
import scorex.crypto.EllipticCurveImpl
import scorex.crypto.singing.SigningFunctions
import scorex.crypto.singing.SigningFunctions.Signature
import scorex.network.message.Message._
import scorex.transaction.{History, TransactionModule}
import scala.util.Try
class BasicMessagesRepo()(implicit val transactionalModule: TransactionModule[_],
consensusModule: ConsensusModule[_]) {
object GetPeersSpec extends MessageSpec[Unit] {
override val messageCode: Message.MessageCode = 1: Byte
override val messageName: String = "GetPeers message"
override def deserializeData(bytes: Array[Byte]): Try[Unit] =
Try(require(bytes.isEmpty, "Non-empty data for GetPeers"))
override def serializeData(data: Unit): Array[Byte] = Array()
}
object PeersSpec extends MessageSpec[Seq[InetSocketAddress]] {
private val AddressLength = 4
private val PortLength = 4
private val DataLength = 4
override val messageCode: Message.MessageCode = 2: Byte
override val messageName: String = "Peers message"
override def deserializeData(bytes: Array[Byte]): Try[Seq[InetSocketAddress]] = Try {
val lengthBytes = util.Arrays.copyOfRange(bytes, 0, DataLength)
val length = Ints.fromByteArray(lengthBytes)
assert (bytes.length == DataLength + (length * (AddressLength + PortLength)), "Data does not match length")
(0 until length).map { i =>
val position = lengthBytes.length + (i * (AddressLength + PortLength))
val addressBytes = util.Arrays.copyOfRange(bytes, position, position + AddressLength)
val address = InetAddress.getByAddress(addressBytes)
val portBytes = util.Arrays.copyOfRange(bytes, position + AddressLength, position + AddressLength + PortLength)
new InetSocketAddress(address, Ints.fromByteArray(portBytes))
}
}
override def serializeData(peers: Seq[InetSocketAddress]): Array[Byte] = {
val length = peers.size
val lengthBytes = Ints.toByteArray(length)
peers.foldLeft(lengthBytes) { case (bs, peer) =>
Bytes.concat(bs, peer.getAddress.getAddress, Ints.toByteArray(peer.getPort))
}
}
}
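  // Hypothetical round-trip sketch: serialize a single sample address with PeersSpec and
  // parse it back; the loopback address and port 9084 are arbitrary example values.
  private def peersSpecRoundTripExample(): Try[Seq[InetSocketAddress]] = {
    val sample = Seq(new InetSocketAddress(InetAddress.getLoopbackAddress, 9084))
    PeersSpec.deserializeData(PeersSpec.serializeData(sample))
  }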
trait SignaturesSeqSpec extends MessageSpec[Seq[SigningFunctions.Signature]] {
import scorex.crypto.EllipticCurveImpl.SignatureLength
private val DataLength = 4
override def deserializeData(bytes: Array[Byte]): Try[Seq[Signature]] = Try {
val lengthBytes = bytes.take(DataLength)
val length = Ints.fromByteArray(lengthBytes)
assert(bytes.length == DataLength + (length * SignatureLength), "Data does not match length")
(0 to length - 1).map { i =>
val position = DataLength + (i * SignatureLength)
bytes.slice(position, position + SignatureLength)
}.toSeq
}
override def serializeData(signatures: Seq[Signature]): Array[Byte] = {
val length = signatures.size
val lengthBytes = Ints.toByteArray(length)
//WRITE SIGNATURES
signatures.foldLeft(lengthBytes) { case (bs, header) => Bytes.concat(bs, header) }
}
}
object GetSignaturesSpec extends SignaturesSeqSpec {
override val messageCode: MessageCode = 20: Byte
override val messageName: String = "GetSignatures message"
}
object SignaturesSpec extends SignaturesSeqSpec {
override val messageCode: MessageCode = 21: Byte
override val messageName: String = "Signatures message"
}
object GetBlockSpec extends MessageSpec[Block.BlockId] {
override val messageCode: MessageCode = 22: Byte
override val messageName: String = "GetBlock message"
override def serializeData(signature: Block.BlockId): Array[Byte] = signature
override def deserializeData(bytes: Array[Byte]): Try[Block.BlockId] = Try {
require(bytes.length == EllipticCurveImpl.SignatureLength, "Data does not match length")
bytes
}
}
object BlockMessageSpec extends MessageSpec[Block] {
override val messageCode: MessageCode = 23: Byte
override val messageName: String = "Block message"
override def serializeData(block: Block): Array[Byte] = block.bytes
override def deserializeData(bytes: Array[Byte]): Try[Block] = Block.parseBytes(bytes)
}
object ScoreMessageSpec extends MessageSpec[History.BlockchainScore] {
override val messageCode: MessageCode = 24: Byte
override val messageName: String = "Score message"
override def serializeData(score: History.BlockchainScore): Array[Byte] = {
val scoreBytes = score.toByteArray
val bb = java.nio.ByteBuffer.allocate(scoreBytes.length)
bb.put(scoreBytes)
bb.array()
}
override def deserializeData(bytes: Array[Byte]): Try[History.BlockchainScore] = Try {
BigInt(1, bytes)
}
}
val specs = Seq(GetPeersSpec, PeersSpec, GetSignaturesSpec, SignaturesSpec,
GetBlockSpec, BlockMessageSpec, ScoreMessageSpec)
} | ScorexProject/Scorex-Lagonaki | scorex-basics/src/main/scala/scorex/network/message/BasicMessagesRepo.scala | Scala | cc0-1.0 | 5,115 |
package org.jetbrains.jps.incremental
import _root_.java.io._
import _root_.java.net.URL
import _root_.java.util.Properties
/**
* @author Pavel Fatin
*/
package object scala {
type Closeable = {
def close()
}
def using[A <: Closeable, B](resource: A)(block: A => B): B = {
try {
block(resource)
} finally {
resource.close()
}
}
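  // Hypothetical usage of the `using` helper above: read a single property from a local
  // file; the file argument and the "version" property name are arbitrary examples.
  private def readVersionExample(file: File): Option[String] =
    using(new BufferedInputStream(new FileInputStream(file)))(readProperty(_, "version"))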
def extractor[A, B](f: A => B) = new Extractor[A, B](f)
class Extractor[A, B](f: A => B) {
def unapply(a: A): Some[B] = Some(f(a))
}
implicit def toRightBiasedEiter[A, B](either: Either[A, B]): Either.RightProjection[A, B] = either.right
implicit class PipedObject[T](val v: T) extends AnyVal {
def |>[R](f: T => R) = f(v)
}
def readProperty(classLoader: ClassLoader, resource: String, name: String): Option[String] = {
Option(classLoader.getResourceAsStream(resource))
.flatMap(it => using(new BufferedInputStream(it))(readProperty(_, name)))
}
def readProperty(file: File, resource: String, name: String): Option[String] = {
try {
val url = new URL("jar:%s!/%s".format(file.toURI.toString, resource))
Option(url.openStream).flatMap(it => using(new BufferedInputStream(it))(readProperty(_, name)))
} catch {
case _: IOException => None
}
}
private def readProperty(input: InputStream, name: String): Option[String] = {
val properties = new Properties()
properties.load(input)
Option(properties.getProperty(name))
}
}
| consulo/consulo-scala | compiler-settings/src/org/jetbrains/jps/incremental/scala/package.scala | Scala | apache-2.0 | 1,478 |
package services
import java.util.UUID
import akka.actor.{ActorRef, ActorSystem}
import akka.util.Timeout
import controllers.ShiftPersistence
import drt.shared.CrunchApi._
import drt.shared.KeyCloakApi.{KeyCloakGroup, KeyCloakUser}
import drt.shared.Terminals.Terminal
import drt.shared._
import drt.shared.redlist.RedList
import org.slf4j.{Logger, LoggerFactory}
import play.api.mvc.{Headers, Session}
import scala.collection.immutable.Map
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.io.Codec
import scala.language.postfixOps
import scala.util.Try
trait AirportToCountryLike {
lazy val airportInfoByIataPortCode: Map[String, AirportInfo] = {
val bufferedSource = scala.io.Source.fromURL(getClass.getResource("/airports.dat"))(Codec.UTF8)
bufferedSource.getLines().map { l =>
val t = Try {
val splitRow: Array[String] = l.split(",")
val sq: String => String = stripQuotes
AirportInfo(sq(splitRow(1)), sq(splitRow(2)), sq(splitRow(3)), sq(splitRow(4)))
}
t.getOrElse({
AirportInfo("failed on", l, "boo", "ya")
})
}.map(ai => (ai.code, ai)).toMap
}
def stripQuotes(row1: String): String = {
row1.substring(1, row1.length - 1)
}
def airportInfosByAirportCodes(codes: Set[String]): Future[Map[String, AirportInfo]] = Future {
val res = codes.map(code => (code, airportInfoByIataPortCode.get(code)))
val successes: Set[(String, AirportInfo)] = res collect {
case (code, Some(ai)) =>
(code, ai)
}
successes.toMap
}
}
object AirportToCountry extends AirportToCountryLike {
def isRedListed(portToCheck: PortCode, forDate: MillisSinceEpoch): Boolean = airportInfoByIataPortCode
.get(portToCheck.iata)
.exists(ai => RedList.countryCodesByName(forDate).contains(ai.country))
}
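// Hypothetical usage sketch: "LHR" is only a sample IATA code, and PortCode("LHR") plus the
// current-time red-list check are assumptions about the shared API, so the calls are kept
// in comments rather than added as live code.
//   val infos: Future[Map[String, AirportInfo]] =
//     AirportToCountry.airportInfosByAirportCodes(Set("LHR"))
//   val redListed: Boolean =
//     AirportToCountry.isRedListed(PortCode("LHR"), System.currentTimeMillis())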
abstract class ApiService(val airportConfig: AirportConfig,
val shiftsActor: ActorRef,
val fixedPointsActor: ActorRef,
val staffMovementsActor: ActorRef,
val headers: Headers,
val session: Session)
extends Api with ShiftPersistence {
override implicit val timeout: akka.util.Timeout = Timeout(30 seconds)
override val log: Logger = LoggerFactory.getLogger(this.getClass)
def portStateActor: ActorRef
def actorSystem: ActorSystem
def forecastWeekSummary(startDay: MillisSinceEpoch, terminal: Terminal): Future[Option[ForecastPeriodWithHeadlines]]
def getShiftsForMonth(month: MillisSinceEpoch, terminal: Terminal): Future[ShiftAssignments]
def updateShifts(shiftsToUpdate: Seq[StaffAssignment]): Unit
def getKeyCloakUsers(): Future[List[KeyCloakUser]]
def getKeyCloakGroups(): Future[List[KeyCloakGroup]]
def getKeyCloakUserGroups(userId: UUID): Future[Set[KeyCloakGroup]]
def addUserToGroups(userId: UUID, groups: Set[String]): Future[Unit]
def removeUserFromGroups(userId: UUID, groups: Set[String]): Future[Unit]
def getShowAlertModalDialog(): Boolean
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/main/scala/services/ApiService.scala | Scala | apache-2.0 | 3,120 |
package io.getquill.h2
import io.getquill.PeopleMonixSpec
import org.scalatest.Matchers._
class PeopleMonixJdbcSpec extends PeopleMonixSpec {
val context = testContext
import testContext._
override def beforeAll = {
testContext.transaction {
for {
_ <- testContext.run(query[Couple].delete)
_ <- testContext.run(query[Person].filter(_.age > 0).delete)
_ <- testContext.run(liftQuery(peopleEntries).foreach(p => peopleInsert(p)))
_ <- testContext.run(liftQuery(couplesEntries).foreach(p => couplesInsert(p)))
} yield ()
}.runSyncUnsafe()
}
"Example 1 - differences" in {
testContext.run(`Ex 1 differences`).runSyncUnsafe() should contain theSameElementsAs `Ex 1 expected result`
}
"Example 2 - range simple" in {
testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result`
}
"Example 3 - satisfies" in {
testContext.run(`Ex 3 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 3 expected result`
}
"Example 4 - satisfies" in {
testContext.run(`Ex 4 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 4 expected result`
}
"Example 5 - compose" in {
testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result`
}
"Example 6 - predicate 0" in {
testContext.run(satisfies(eval(`Ex 6 predicate`))).runSyncUnsafe() mustEqual `Ex 6 expected result`
}
"Example 7 - predicate 1" in {
testContext.run(satisfies(eval(`Ex 7 predicate`))).runSyncUnsafe() mustEqual `Ex 7 expected result`
}
"Example 8 - contains empty" in {
testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 8 param`))).runSyncUnsafe() mustEqual `Ex 8 expected result`
}
"Example 9 - contains non empty" in {
testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 9 param`))).runSyncUnsafe() mustEqual `Ex 9 expected result`
}
"Example 10 - pagination" in {
testContext.run(`Ex 10 page 1 query`).runSyncUnsafe() mustEqual `Ex 10 page 1 expected`
testContext.run(`Ex 10 page 2 query`).runSyncUnsafe() mustEqual `Ex 10 page 2 expected`
}
"Example 11 - streaming" in {
collect(testContext.stream(`Ex 11 query`)) should contain theSameElementsAs `Ex 11 expected`
}
}
| mentegy/quill | quill-jdbc-monix/src/test/scala/io/getquill/h2/PeopleMonixJdbcSpec.scala | Scala | apache-2.0 | 2,365 |
package model.ui
import backend.data.mongodb.service.UserDataServiceMongo
import backend.data.service.UserDataService
import backend.data.mongodb.service.TagDataServiceMongo
import backend.data.service.TagDataService
import play.api.Play
import model.blog.PostEnriched
/**
 * Meta tags used to fill the meta tag entries in the HTML pages.
*
* @author Stefan Bleibinhaus
*
*/
case class MetaTags(
val description: String,
val keywords: String,
val author: String)
object MetaTags {
val empty = MetaTags("", "", "")
private val userDataService: UserDataService = UserDataServiceMongo
private val tagDataService: TagDataService = TagDataServiceMongo
private val blogTitle =
Play.current.configuration.getString("blogTitle").getOrElse("bleibinha.us/blog")
private val blogDescription =
Play.current.configuration.getString("blogDescription").getOrElse(blogTitle)
/**
* Use this constructor for the Homepage
*
* @return
*/
def apply(): MetaTags =
MetaTags(
blogDescription,
tagDataService.keywords,
userDataService.getUsername())
/**
* Use this constructor for pages which do not show a single post
*
* @param pagename The name of page, e.g. "About page"
* @return
*/
def apply(pagename: String): MetaTags =
MetaTags(
description(pagename),
"",
userDataService.getUsername())
/**
* Use this constructor for posts
*
* @param enrichedPost
* @return
*/
def apply(enrichedPost: PostEnriched): MetaTags = MetaTags(
description(enrichedPost.title),
enrichedPost.tags.mkString(","),
userDataService.getUsername())
private def description(pagename: String): String = blogDescription + ", " + pagename
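  // Hypothetical helper exercising the three factory methods above; `post` is assumed to
  // be a PostEnriched loaded elsewhere and is used purely for illustration.
  private def exampleTags(post: PostEnriched): Seq[MetaTags] =
    Seq(MetaTags(), MetaTags("About page"), MetaTags(post))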
} | ExNexu/scablo | app/model/ui/MetaTags.scala | Scala | bsd-2-clause | 1,758 |
for index in 1...5 {
println("\\(index) times 5 is \\(index * 5)")
}
// 1 times 5 is 5
// 2 times 5 is 10
// 3 times 5 is 15
// 4 times 5 is 20
// 5 times 5 is 25
| leverich/swiftislikescala | comparisons/basics/inclusive_range_operator/swift.scala | Scala | mit | 165 |
package manager
import connectors.K8055
import model._
import play.api.Logger
import scala.concurrent.ExecutionContext.Implicits.global
object SequenceExecutionManager extends SequenceExecutionManager{
override val sequenceManager = SequenceManager
override val timer = Timer
}
trait SequenceExecutionManager {
val sequenceManager: SequenceManager
val timer: Timer
val START_STEP = 0
var currentStep = START_STEP
var running = false
def runSequence():Unit = {
if(running) {
if(currentStep < 1) currentStep = 1
sequenceManager.getStep(currentStep).fold(stopSequencer() )(step => performStep(step))
}
}
def stopSequencer():Unit = {running = false}
def startSequencer():Unit = {running = true}
def incStep():Unit = {currentStep += 1}
def decStep():Unit = {currentStep -= 1}
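  // Typical usage sketch (hypothetical): a caller would reset and start the sequencer,
  //   currentStep = START_STEP
  //   startSequencer()
  // and then have an external scheduler invoke runSequence() periodically; the scheduling
  // itself lives outside this trait.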
def performStep(step: Step): Unit = {
step.eventType match {
case (EventType.ON) => K8055.patchDeviceState(DeviceState(step.deviceId, Some(true), None)); incStep() //Digital Out
case (EventType.OFF) => K8055.patchDeviceState(DeviceState(step.deviceId, Some(false), None)); incStep() //Digital/Analogue Out/Monitor
case (EventType.SET_VALUE) => K8055.patchDeviceState(DeviceState(step.deviceId, None, step.value)); incStep()
case (EventType.WAIT_RISING) => runWaitRising(step)
case (EventType.WAIT_FALLING) => runWaitFalling(step)
case (EventType.WAIT_TIME) => runWaitTime(step)
case (EventType.WAIT_ON) => runWaitOn(step)
case (EventType.WAIT_OFF) => runWaitOff(step)
case (EventType.WAIT_COUNT) => incStep()
case (EventType.STROBE_ON_TIME) => K8055.patchDeviceState(DeviceState(step.deviceId, None, None, step.value)); incStep()
case (EventType.STROBE_OFF_TIME) => K8055.patchDeviceState(DeviceState(step.deviceId, None, None, None, step.value)); incStep()
case (EventType.DESCRIPTION) => incStep()
case _ => Logger.warn("Bad Step Type: " + step); incStep()
}
}
def runWaitTime(step:Step): Unit = {
if(timer.waitingFor(step.id)) { //already running
if(timer.finished(step.id)) {
incStep()
timer.step = -1
}
}
else{ //set up a Timer
step.value.fold(Logger.warn("No duration specified, can't wait for: " + step)) {
duration => timer.setTimer( step.id, step.value.getOrElse(0))
}
}
}
def runWaitRising(step: Step) = runWait(step, (x:Int,y:Int) => x >= y)
def runWaitFalling(step: Step) = runWait(step, (x:Int,y:Int) => x < y)
def runWait(step: Step, compareFn:(Int,Int)=>Boolean): Unit = {
K8055.getDevice(step.deviceId).map{
sensor => sensor.analogueState.fold() {
reading => step.value.fold() {
target => if(compareFn(reading,target)) incStep()
}
}
}
}
def runWaitOn(step: Step): Unit = {
K8055.getDevice(step.deviceId).map{
inputDevice => inputDevice.digitalState.fold() {
isOn => if(isOn) incStep()
}
}
}
def runWaitOff(step: Step): Unit = {
K8055.getDevice(step.deviceId).map{
inputDevice => inputDevice.digitalState.fold() {
isOn => if(!isOn) incStep()
}
}
}
}
| bullimog/k8055-sequencer | app/manager/SequenceExecutionManager.scala | Scala | apache-2.0 | 3,174 |
/**
* *****************************************************************************
* Copyright 2014 Katja Hahn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ****************************************************************************
*/
package com.github.katjahahn.parser.sections.idata
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import com.github.katjahahn.parser.optheader.OptionalHeader.MagicNumber._
import com.github.katjahahn.parser.ByteArrayUtil._
import com.github.katjahahn.parser.IOUtil.{ NL, SpecificationFormat }
import com.github.katjahahn.parser.StandardField
import com.github.katjahahn.parser.HeaderKey
import com.github.katjahahn.parser.IOUtil
import com.github.katjahahn.parser.MemoryMappedPE
import com.github.katjahahn.parser.optheader.OptionalHeader
import org.apache.logging.log4j.LogManager
import com.github.katjahahn.parser.sections.SectionLoader.LoadInfo
import com.github.katjahahn.parser.sections.idata.DelayLoadDirectoryKey._
import com.github.katjahahn.parser.PhysicalLocation
import com.github.katjahahn.parser.Location
import com.github.katjahahn.parser.sections.idata.DelayLoadDirectoryEntry._
import com.github.katjahahn.parser.optheader.WindowsEntryKey
class DelayLoadDirectoryEntry private (
private val entries: Map[DelayLoadDirectoryKey, StandardField],
private val offset: Long,
val name: String,
private val lookupTableEntries: List[LookupTableEntry]) {
def apply(key: DelayLoadDirectoryKey): Long = entries(key).getValue
/**
* Returns a list of all file locations where directory entries are found
*/
def getPhysicalLocations(): List[PhysicalLocation] = new PhysicalLocation(offset, delayDirSize) ::
//collect lookupTableEntry locations
(for (entry <- lookupTableEntries) yield new PhysicalLocation(entry.offset, entry.size)) :::
//collect HintNameEntry locations
(lookupTableEntries collect {
case e: NameEntry =>
new PhysicalLocation(e.hintNameEntry.fileOffset, e.hintNameEntry.size)
})
def lookupTableEntriesSize: Int = lookupTableEntries.size
def getInfo(): String = s"""${entries.values.mkString(NL)}
|ASCII name: $name
|
|lookup table entries for $name
|--------------------------------------
|
|${lookupTableEntries.mkString(NL)}""".stripMargin
override def toString(): String = getInfo()
/**
* Converts the directory entry to an ImportDLL instance
*/
def toImportDLL(): ImportDLL = {
val nameImports = lookupTableEntries collect { case i: NameEntry => i.toImport.asInstanceOf[NameImport] }
val ordImports = lookupTableEntries collect { case i: OrdinalEntry => i.toImport.asInstanceOf[OrdinalImport] }
new ImportDLL(name, nameImports.asJava, ordImports.asJava)
}
}
object DelayLoadDirectoryEntry {
private final val logger = LogManager.getLogger(DelayLoadDirectoryEntry.getClass().getName())
private val delayLoadSpec = "delayimporttablespec"
val delayDirSize = 32
def apply(loadInfo: LoadInfo, nr: Int): DelayLoadDirectoryEntry = {
val mmbytes = loadInfo.memoryMapped
val entryFileOffset = loadInfo.fileOffset + nr * delayDirSize
val va = loadInfo.va
val readAddress = va + nr * delayDirSize
val format = new SpecificationFormat(0, 1, 2, 3)
val delayLoadBytes = mmbytes.slice(readAddress, readAddress + delayDirSize)
val entries = IOUtil.readHeaderEntries(classOf[DelayLoadDirectoryKey],
format, delayLoadSpec, delayLoadBytes, entryFileOffset).asScala.toMap
val nameRVA = entries(NAME).getValue.toInt
val name = getASCIIName(nameRVA, va, mmbytes)
try {
val lookupTableEntries = readLookupTableEntries(entries, loadInfo)
return new DelayLoadDirectoryEntry(entries, entryFileOffset, name, lookupTableEntries)
} catch {
case e: FailureEntryException => logger.error(
"Invalid LookupTableEntry found, parsing aborted, " + e.getMessage())
}
// No lookup table entries read
return new DelayLoadDirectoryEntry(entries, entryFileOffset, name, Nil)
}
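  // Usage sketch (hypothetical): the section loader is expected to build one entry per
  // 32-byte directory slot and convert it, e.g.
  //   val entry = DelayLoadDirectoryEntry(loadInfo, 0)
  //   val dll   = entry.toImportDLL()
  // where `loadInfo` is the LoadInfo already prepared for the delay-load import section.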
private def readLookupTableEntries(entries: Map[DelayLoadDirectoryKey, StandardField],
loadInfo: LoadInfo): List[LookupTableEntry] = {
val virtualAddress = loadInfo.va
val mmbytes = loadInfo.memoryMapped
val magicNumber = loadInfo.data.getOptionalHeader.getMagicNumber()
val fileOffset = loadInfo.fileOffset
var entry: LookupTableEntry = null
var iRVA = entries(DELAY_IMPORT_NAME_TABLE).getValue
var offset = iRVA - virtualAddress
var relOffset = iRVA
var iVA = iRVA + loadInfo.data.getOptionalHeader.get(WindowsEntryKey.IMAGE_BASE)
val lookupTableEntries = ListBuffer[LookupTableEntry]()
logger.debug("offset: " + offset + " rva: " + iRVA + " byteslength: " +
mmbytes.length() + " virtualAddress " + virtualAddress)
val EntrySize = magicNumber match {
case PE32 => 4
case PE32_PLUS => 8
case ROM => throw new IllegalArgumentException("ROM file format not covered by PortEx")
case UNKNOWN => throw new IllegalArgumentException("Unknown magic number, can not parse delay-load imports")
}
do {
//TODO get fileoffset for entry from mmbytes instead of this to avoid
//fractionated section issues ?
val entryFileOffset = fileOffset + offset
//val entryFileOffset = mmbytes.getPhysforVir(iRVA) //doesn't work
//FIXME dummy
val dummy = new DirectoryEntry(null, 0)
entry = LookupTableEntry(mmbytes, offset.toInt, EntrySize,
virtualAddress, relOffset, iVA, dummy, entryFileOffset)
if (!entry.isInstanceOf[NullEntry]) lookupTableEntries += entry
offset += EntrySize
relOffset += EntrySize
} while (!entry.isInstanceOf[NullEntry])
lookupTableEntries.toList
}
private def getASCIIName(nameRVA: Int, virtualAddress: Long,
mmbytes: MemoryMappedPE): String = {
val offset = nameRVA
val nullindex = mmbytes.indexWhere(_ == 0, offset)
new String(mmbytes.slice(offset, nullindex))
}
} | katjahahn/PortEx | src/main/java/com/github/katjahahn/parser/sections/idata/DelayLoadDirectoryEntry.scala | Scala | apache-2.0 | 6,528 |