code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M)
---|---|---|---|---|---
package quizleague.web.maintain.util
import scalajs.js
import quizleague.web.model.Team
import quizleague.web.util.component.SelectWrapper
import quizleague.web.util.rx.RefObservable
/** Keeps track of which team ids have already been taken and filters them out of the selectable options. */
class TeamManager(var teams: js.Array[SelectWrapper[Team]]) {
  private var usedTeams = Set[String]()
  /** Teams not yet taken, excluding `other` (the opposing team) when it is defined. */
  def unusedTeams(other: RefObservable[Team]) =
    teams.filter(x => !usedTeams.contains(x.value.id) &&
      (if (other != js.undefined && other != null) x.value.id != other.id else true))
  /** Marks a team as used and returns it. */
  def take(team: RefObservable[Team]) = { usedTeams += team.id; team }
  /** Releases a previously taken team. */
  def untake(team: RefObservable[Team]) = usedTeams -= team.id
}
| gumdrop/quizleague-maintain | js/src/main/scala/quizleague/web/maintain/util/TeamManager.scala | Scala | mit | 613 |
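A minimal usage sketch for the `TeamManager` above, assuming a fixture-editing form that already holds `teamOptions: js.Array[SelectWrapper[Team]]` plus two selected teams `homeTeam` and `awayTeam` of type `RefObservable[Team]` (all assumed values, not part of the repository):

```scala
val manager = new TeamManager(teamOptions)
val selectable = manager.unusedTeams(homeTeam) // options minus already-taken teams and the opposing side
manager.take(awayTeam)                         // reserve the chosen team
manager.untake(awayTeam)                       // release it again if the selection changes
```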
/*
* Copyright 2014-2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Created by prannamalai on 2/20/15.
*/
package com.paypal.genio
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.json4s.native.JsonParser
import scala.io.Source
sealed abstract class SpecType
case object SpecTypeGDD extends SpecType
case object SpecTypeSwagger extends SpecType
sealed abstract class SpecFormat
case object SpecFormatJSON extends SpecFormat
case object SpecFormatYAML extends SpecFormat
class Reader{
def readFile (fileName:String) = {
Source.fromURL(getClass.getResource(fileName)).getLines().mkString("\n")
}
  def readWebUrl (webUrl:String) = {
    Source.fromURL(webUrl).mkString
  }
def specFormat (fileName:String):Option[SpecFormat] = {
fileName match {
case file if file.endsWith("json") => Option(SpecFormatJSON)
case file if file.endsWith("yaml") => Option(SpecFormatYAML)
case _ => None
}
}
  def parser (resourceName:String, content:String) = {
    specFormat(resourceName).get match {
      case SpecFormatJSON => parseJson(content)
      case SpecFormatYAML => parseYaml(content)
    }
  }
def parseJson (json:String) = {
implicit val formats = org.json4s.DefaultFormats
JsonParser.parse(json).extract[Map[String, Any]]
}
def parseYaml (yaml:String) = {
val parser = new ObjectMapper(new YAMLFactory())
parser.registerModule(DefaultScalaModule)
parser.readValue(yaml, classOf[Map[String, Any]])
}
  def findSpecType (parsedSpec:Map[String, Any]) = {
    if (parsedSpec.contains("swagger"))
      SpecTypeSwagger
    else if (parsedSpec.contains("discoveryVersion"))
      SpecTypeGDD
    else
      None
  }
def readSpec(fileName:String) = {
val fileContent:String = readFile(fileName)
val parsedSpec = parser(fileName, fileContent)
(findSpecType(parsedSpec), parsedSpec)
}
} | piyush-verma/genio-scala | src/main/scala/Reader.scala | Scala | apache-2.0 | 2,658 |
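A hypothetical call to the `Reader` above; `"/petstore.json"` is an assumed classpath resource, not a file shipped with the project:

```scala
val reader = new Reader
val (specType, parsedSpec) = reader.readSpec("/petstore.json")

specType match {
  case SpecTypeSwagger => println(s"Swagger spec, ${parsedSpec.size} top-level keys")
  case SpecTypeGDD     => println("Google Discovery Document")
  case other           => println(s"Unrecognised spec: $other")
}
```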
package io.fintrospect.renderers.simplejson
import com.twitter.finagle.http.path.Path
import com.twitter.finagle.http.{Request, Response}
import io.fintrospect.formats.Argo
import io.fintrospect.formats.Argo.JsonFormat.{Field, obj}
import io.fintrospect.formats.Argo.ResponseBuilder._
import io.fintrospect.renderers.{JsonErrorResponseRenderer, ModuleRenderer}
import io.fintrospect.util.ExtractionError
import io.fintrospect.{Security, ServerRoute}
/**
* Ultra-basic ModuleRenderer implementation that only supports the route paths and the main descriptions of each.
*/
class SimpleJson extends ModuleRenderer {
override def badRequest(badParameters: Seq[ExtractionError]): Response = JsonErrorResponseRenderer.badRequest(badParameters)
override def notFound(request: Request): Response = JsonErrorResponseRenderer.notFound()
private def render(basePath: Path, route: ServerRoute[_, _]): Field =
route.method.toString() + ":" + route.describeFor(basePath) -> Argo.JsonFormat.string(route.routeSpec.summary)
override def description(basePath: Path, security: Security, routes: Seq[ServerRoute[_, _]]): Response = Ok(obj("resources" -> obj(routes.map(r => render(basePath, r)))))
}
object SimpleJson {
def apply() = new SimpleJson()
} | daviddenton/fintrospect | core/src/main/scala/io/fintrospect/renderers/simplejson/SimpleJson.scala | Scala | apache-2.0 | 1,257 |
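For illustration only: with a single `GET /ping` route whose summary is "Uptime check" (an invented example, not from the project), the payload built by `description` above would have roughly this shape:

```scala
import io.fintrospect.formats.Argo.JsonFormat.{obj, string}

// Mirrors the structure built in SimpleJson.description(); route and summary are assumed.
val body = obj("resources" -> obj("GET:/ping" -> string("Uptime check")))
// Rendered JSON: {"resources":{"GET:/ping":"Uptime check"}}
```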
package util.enrich_my_library
/**
* @author ynupc
* Created on 2017/05/01
*/
trait StringUtilsConversions {
protected def str: String
def replaceAllLiteratim(target: CharSequence, replacement: CharSequence): String
def quote(quotation: (String, String)): String
def codePointNumber: Int
def toCodePointArray: Array[Int]
//binaryString to AnyVal
def binaryStringToByte: Byte
def binaryStringToByteOpt: Option[Byte] = {
try {
Option(binaryStringToByte)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToByteOr(defaultValue: Byte): Byte = {
binaryStringToByteOpt match {
case Some(byte) =>
byte
case None =>
defaultValue
}
}
def binaryStringToShort: Short
def binaryStringToShortOpt: Option[Short] = {
try {
Option(binaryStringToShort)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToShortOr(defaultValue: Short): Short = {
binaryStringToShortOpt match {
case Some(short) =>
short
case None =>
defaultValue
}
}
def binaryStringToInt: Int
def binaryStringToIntOpt: Option[Int] = {
try {
Option(binaryStringToInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToIntOr(defaultValue: Int): Int = {
binaryStringToIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def binaryStringToUnsignedInt: Int
def binaryStringToUnsignedIntOpt: Option[Int] = {
try {
Option(binaryStringToUnsignedInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToUnsignedIntOr(defaultValue: Int): Int = {
binaryStringToUnsignedIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def binaryStringToLong: Long
def binaryStringToLongOpt: Option[Long] = {
try {
Option(binaryStringToLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToLongOr(defaultValue: Long): Long = {
binaryStringToLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
def binaryStringToUnsignedLong: Long
def binaryStringToUnsignedLongOpt: Option[Long] = {
try {
Option(binaryStringToUnsignedLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def binaryStringToUnsignedLongOr(defaultValue: Long): Long = {
binaryStringToUnsignedLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
//octalString to AnyVal
def octalStringToByte: Byte
def octalStringToByteOpt: Option[Byte] = {
try {
Option(octalStringToByte)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToByteOr(defaultValue: Byte): Byte = {
octalStringToByteOpt match {
case Some(byte) =>
byte
case None =>
defaultValue
}
}
def octalStringToShort: Short
def octalStringToShortOpt: Option[Short] = {
try {
Option(octalStringToShort)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToShortOr(defaultValue: Short): Short = {
octalStringToShortOpt match {
case Some(short) =>
short
case None =>
defaultValue
}
}
def octalStringToInt: Int
def octalStringToIntOpt: Option[Int] = {
try {
Option(octalStringToInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToIntOr(defaultValue: Int): Int = {
octalStringToIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def octalStringToUnsignedInt: Int
def octalStringToUnsignedIntOpt: Option[Int] = {
try {
Option(octalStringToUnsignedInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToUnsignedIntOr(defaultValue: Int): Int = {
octalStringToUnsignedIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def octalStringToLong: Long
def octalStringToLongOpt: Option[Long] = {
try {
Option(octalStringToLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToLongOr(defaultValue: Long): Long = {
octalStringToLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
def octalStringToUnsignedLong: Long
def octalStringToUnsignedLongOpt: Option[Long] = {
try {
Option(octalStringToUnsignedLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def octalStringToUnsignedLongOr(defaultValue: Long): Long = {
octalStringToUnsignedLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
//hexString to AnyVal
def hexStringToByte: Byte
def hexStringToByteOpt: Option[Byte] = {
try {
Option(hexStringToByte)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToByteOr(defaultValue: Byte): Byte = {
hexStringToByteOpt match {
case Some(byte) =>
byte
case None =>
defaultValue
}
}
def hexStringToShort: Short
def hexStringToShortOpt: Option[Short] = {
try {
Option(hexStringToShort)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToShortOr(defaultValue: Short): Short = {
hexStringToShortOpt match {
case Some(short) =>
short
case None =>
defaultValue
}
}
def hexStringToInt: Int
def hexStringToIntOpt: Option[Int] = {
try {
Option(hexStringToInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToIntOr(defaultValue: Int): Int = {
hexStringToIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def hexStringToUnsignedInt: Int
def hexStringToUnsignedIntOpt: Option[Int] = {
try {
Option(hexStringToUnsignedInt)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToUnsignedIntOr(defaultValue: Int): Int = {
hexStringToUnsignedIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def hexStringToLong: Long
def hexStringToLongOpt: Option[Long] = {
try {
Option(hexStringToLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToLongOr(defaultValue: Long): Long = {
hexStringToLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
def hexStringToUnsignedLong: Long
def hexStringToUnsignedLongOpt: Option[Long] = {
try {
Option(hexStringToUnsignedLong)
} catch {
case e: NumberFormatException =>
e.printStackTrace()
None
}
}
def hexStringToUnsignedLongOr(defaultValue: Long): Long = {
hexStringToUnsignedLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
def toIntOpt: Option[Int]
def toIntOr(defaultValue: Int): Int = {
toIntOpt match {
case Some(int) =>
int
case None =>
defaultValue
}
}
def toLongOpt: Option[Long]
def toLongOr(defaultValue: Long): Long = {
toLongOpt match {
case Some(long) =>
long
case None =>
defaultValue
}
}
def toShortOpt: Option[Short]
def toShortOr(defaultValue: Short): Short = {
toShortOpt match {
case Some(short) =>
short
case None =>
defaultValue
}
}
def toByteOpt: Option[Byte]
def toByteOr(defaultValue: Byte): Byte = {
toByteOpt match {
case Some(byte) =>
byte
case None =>
defaultValue
}
}
def toFloatOpt: Option[Float]
def toFloatOr(defaultValue: Float): Float = {
toFloatOpt match {
case Some(float) =>
float
case None =>
defaultValue
}
}
def toDoubleOpt: Option[Double]
def toDoubleOr(defaultValue: Double): Double = {
toDoubleOpt match {
case Some(double) =>
double
case None =>
defaultValue
}
}
def toBooleanOpt: Option[Boolean]
def toBooleanOr(defaultValue: Boolean): Boolean = {
toBooleanOpt match {
case Some(boolean) =>
boolean
case None =>
defaultValue
}
}
} | ynupc/scalastringcourseday4 | src/main/scala/util/enrich_my_library/StringUtilsConversions.scala | Scala | apache-2.0 | 9,180 |
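A minimal sketch of how a trait like `StringUtilsConversions` is typically wired up via the "enrich my library" pattern; the `RichString` implicit class and this tiny subset of members are assumptions, not the project's actual implementation:

```scala
object StringEnrichmentSketch {
  // Enrichment wrapper over String; only a few representative members are sketched.
  implicit class RichString(private val str: String) {
    def toIntOpt: Option[Int] =
      try Option(str.toInt) catch { case _: NumberFormatException => None }
    def toIntOr(defaultValue: Int): Int = toIntOpt.getOrElse(defaultValue)
    def binaryStringToInt: Int = Integer.parseInt(str, 2)
  }

  def main(args: Array[String]): Unit = {
    println("42".toIntOr(0))            // 42
    println("not a number".toIntOr(0))  // 0
    println("1010".binaryStringToInt)   // 10
  }
}
```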
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.storage
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.storage._
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils
/** Page showing list of RDD's currently stored in the cluster */
private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
private val listener = parent.listener
def render(request: HttpServletRequest): Seq[Node] = {
val content = rddTable(listener.rddInfoList) ++
receiverBlockTables(listener.allExecutorStreamBlockStatus.sortBy(_.executorId))
UIUtils.headerSparkPage("Spark Cache", content, parent)
}
private[storage] def rddTable(rdds: Seq[RDDInfo]): Seq[Node] = {
if (rdds.isEmpty) {
// Don't show the rdd table if there is no RDD persisted.
Nil
} else {
<div>
<h4>RDDs</h4>
{UIUtils.listingTable(rddHeader, rddRow, rdds, id = Some("storage-by-rdd-table"))}
</div>
}
}
/** Header fields for the RDD table */
private val rddHeader = Seq(
"RDD Name",
"Storage Level",
"Cached Partitions",
"Fraction Cached",
"Size in Memory",
"Size on Disk")
/** Render an HTML row representing an RDD */
private def rddRow(rdd: RDDInfo): Seq[Node] = {
// scalastyle:off
<tr>
<td>
<a href={"%s/Spark Cache/rdd?id=%s".format(UIUtils.prependBaseUri(parent.basePath), rdd.id)}>
{rdd.name}
</a>
</td>
<td>{rdd.storageLevel.description}
</td>
<td>{rdd.numCachedPartitions.toString}</td>
<td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td>
<td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td>
<td sorttable_customkey={rdd.diskSize.toString} >{Utils.bytesToString(rdd.diskSize)}</td>
</tr>
// scalastyle:on
}
private[storage] def receiverBlockTables(statuses: Seq[ExecutorStreamBlockStatus]): Seq[Node] = {
if (statuses.map(_.numStreamBlocks).sum == 0) {
// Don't show the tables if there is no stream block
Nil
} else {
val blocks = statuses.flatMap(_.blocks).groupBy(_.blockId).toSeq.sortBy(_._1.toString)
<div>
<h4>Receiver Blocks</h4>
{executorMetricsTable(statuses)}
{streamBlockTable(blocks)}
</div>
}
}
private def executorMetricsTable(statuses: Seq[ExecutorStreamBlockStatus]): Seq[Node] = {
<div>
<h5>Aggregated Block Metrics by Executor</h5>
{UIUtils.listingTable(executorMetricsTableHeader, executorMetricsTableRow, statuses,
id = Some("storage-by-executor-stream-blocks"))}
</div>
}
private val executorMetricsTableHeader = Seq(
"Executor ID",
"Address",
"Total Size in Memory",
"Total Size on Disk",
"Stream Blocks")
private def executorMetricsTableRow(status: ExecutorStreamBlockStatus): Seq[Node] = {
<tr>
<td>
{status.executorId}
</td>
<td>
{status.location}
</td>
<td sorttable_customkey={status.totalMemSize.toString}>
{Utils.bytesToString(status.totalMemSize)}
</td>
<td sorttable_customkey={status.totalDiskSize.toString}>
{Utils.bytesToString(status.totalDiskSize)}
</td>
<td>
{status.numStreamBlocks.toString}
</td>
</tr>
}
private def streamBlockTable(blocks: Seq[(BlockId, Seq[BlockUIData])]): Seq[Node] = {
if (blocks.isEmpty) {
Nil
} else {
<div>
<h5>Blocks</h5>
{UIUtils.listingTable(
streamBlockTableHeader,
streamBlockTableRow,
blocks,
id = Some("storage-by-block-table"),
sortable = false)}
</div>
}
}
private val streamBlockTableHeader = Seq(
"Block ID",
"Replication Level",
"Location",
"Storage Level",
"Size")
/** Render a stream block */
private def streamBlockTableRow(block: (BlockId, Seq[BlockUIData])): Seq[Node] = {
val replications = block._2
assert(replications.size > 0) // This must be true because it's the result of "groupBy"
if (replications.size == 1) {
streamBlockTableSubrow(block._1, replications.head, replications.size, true)
} else {
streamBlockTableSubrow(block._1, replications.head, replications.size, true) ++
replications.tail.flatMap(streamBlockTableSubrow(block._1, _, replications.size, false))
}
}
private def streamBlockTableSubrow(
blockId: BlockId, block: BlockUIData, replication: Int, firstSubrow: Boolean): Seq[Node] = {
val (storageLevel, size) = streamBlockStorageLevelDescriptionAndSize(block)
<tr>
{
if (firstSubrow) {
<td rowspan={replication.toString}>
{block.blockId.toString}
</td>
<td rowspan={replication.toString}>
{replication.toString}
</td>
}
}
<td>{block.location}</td>
<td>{storageLevel}</td>
<td>{Utils.bytesToString(size)}</td>
</tr>
}
private[storage] def streamBlockStorageLevelDescriptionAndSize(
block: BlockUIData): (String, Long) = {
if (block.storageLevel.useDisk) {
("Disk", block.diskSize)
} else if (block.storageLevel.useMemory && block.storageLevel.deserialized) {
("Memory", block.memSize)
} else if (block.storageLevel.useMemory && !block.storageLevel.deserialized) {
("Memory Serialized", block.memSize)
} else {
throw new IllegalStateException(s"Invalid Storage Level: ${block.storageLevel}")
}
}
}
| SnappyDataInc/spark | core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala | Scala | apache-2.0 | 6,391 |
/*
* =========================================================================================
* Copyright © 2015 the khronus project <https://github.com/hotels-tech/khronus>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package com.searchlight.khronus.model
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import org.apache.commons.lang.builder.{ EqualsBuilder, HashCodeBuilder }
import scala.concurrent.duration._
import org.HdrHistogram.Histogram
object MetricType {
val Counter = "counter"
val Timer = "timer"
val Gauge = "gauge"
}
case class Metric(name: String, mtype: String) {
def isSystem = SystemMetric.isSystem(name)
}
object SystemMetric {
val systemSymbol = '~'
def isSystem(metricName: String) = {
metricName.charAt(0) == systemSymbol
}
}
case class MetricBatch(metrics: List[MetricMeasurement])
case class MetricMeasurement(name: String, mtype: String, measurements: List[Measurement]) {
override def toString = s"Metric($name,$mtype)"
def asMetric = Metric(name, mtype)
}
case class Measurement(@JsonDeserialize(contentAs = classOf[java.lang.Long]) ts: Option[Long], @JsonDeserialize(contentAs = classOf[java.lang.Long]) values: Seq[Long]) | despegar/khronus | khronus-core/src/main/scala/com/searchlight/khronus/model/MetricMeasurement.scala | Scala | apache-2.0 | 1,810 |
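A hypothetical batch built from the case classes above; the metric name, timestamp and values are invented for illustration:

```scala
// One timer metric with three recorded values.
val measurement = Measurement(ts = Some(System.currentTimeMillis()), values = Seq(12L, 45L, 7L))
val batch = MetricBatch(List(MetricMeasurement("api.latency", MetricType.Timer, List(measurement))))

batch.metrics.foreach(m => println(m.asMetric)) // Metric(api.latency,timer)
```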
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.core.mr
import kumoi.shell.aaa._
import kumoi.shell.event._
import kumoi.shell.mr._
import kumoi.core.classloader._
import kumoi.core.log._
import kumoi.core._
import kumoi.core.or._
import kumoi.core.Shared._
import kumoi.impl.group._
import scala.actors._
import scala.actors.Actor._
import scala.actors.remote._
import scala.actors.remote.RemoteActor._
import scala.util.Random
import java.net.URL
import kumoi.core.classloader.RemoteClassLoader
/**
* A parallel skeleton worker.
*
* @author Akiyoshi SUGIKI
*/
class DWorker2(val port: Int, cloader: RemoteClassLoader) extends ORObject[Worker] with Worker {
def this(cl: RemoteClassLoader) = this(DefaultPort, cl)
private val logging = Logging("DWORKER")
private var masters = List[Node]()
private var worker: Actor = null
private var jobs = Map[Long, OutputChannel[Any]]()
//private var wid = 0L
def tasks(implicit auth: AAA) = {
worker !? DistWorkerGetTasks match {
case DistWorkerTasks(tasks) => tasks
case _ => List()
}
}
protected def genEvent(e: Exception) = WorkerError(e)
def start() = {
//logging.config("start()")
//logging.config("port=" + port + ", name=" + workerName)
//logging.config("classLoader=" + classLoader)
worker = actor {
//RemoteActor.classLoader = cloader
alive(port)
register(WorkerName, self)
loop {
receive {
case DistAddURL(url) =>
logging.debug("addURL " + url)
masters = Node(url.getHost, url.getPort) :: masters
cloader.addURL(url)
// sender ! DistResult(ok)
case DistRemoveURL(url) =>
cloader.removeURL(url)
case DistParallel(no, op, serial, timeout, auth) =>
logging.debug("DistParallel")
/*
val result = process(no, op, serial, auth)
logging.debug("result " + result)
reply(result)
*/
val task = createTask()
jobs += (serial -> sender)
task ! DistWorkerTask(no, op, serial, timeout, auth)
case DistAbort(no, serial) =>
logging.warn("ABORT is not implemented " + no + ":" + serial)
case DistWorkerCompleted(res, serial) =>
jobs.get(serial) match {
case Some(master) =>
jobs -= serial
master ! res
case None =>
logging.warn("Compeleted() not found")
}
case DistWorkerFailed(res, serial) =>
jobs.get(serial) match {
case Some(master) =>
jobs -= serial
master ! res
case None =>
logging.warn("Compeleted() not found")
}
case DistWorkerGetTasks =>
reply(DistWorkerTasks(jobs.toList.map(j => WorkerTask(j._1))))
case DistExit(reason) =>
exit(reason)
case m =>
logging.warn("unknown message - " + m)
}
}
}
}
private def createTask() = {
actor {
receive {
case DistWorkerTask(no, op, serial, timeout, auth) =>
val res = process(no, op, serial, auth)
logging.debug("worker result=" + res)
res match {
case succ: DistResult[_] => reply(DistWorkerCompleted(succ, serial))
case failed: DistFailed => reply(DistWorkerFailed(failed, serial))
}
}
}
}
private def process(no: Int, op: DistRequest, serial: Long, auth: AAA) = {
type AnyTuple = (Any, Any)
try {
op match {
case DistMap(l, f) =>
val res = l.foldLeft { (List[Any](), List[AnyTuple]()) }{ (b, a) =>
try { (f(a) :: b._1, b._2) } catch {
case e: Exception => logging.debug("***** EXCEPTION *****"); (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB map() res=" + res)
DistResult(no, res._1.reverse.toList, res._2.reverse.toList, serial)
case DistReduceLeft(l, f, _) =>
l match {
case h :: rest =>
val res = l.foldLeft { (h, List[AnyTuple]()) } { (b, a) =>
try { (f(b._1, a), b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB reduceLeft() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case List() => DistResult(no, List(), List(), serial)
}
case DistReduceRight(l, f, _) =>
l match {
case h :: rest =>
val res = l.foldRight { (h, List[AnyTuple]()) } { (a, b) =>
try { (f(a, b._1), b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB reduceRight() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case List() => DistResult(no, List(), List(), serial)
}
case DistForeach(l, f) =>
val res = l.foldLeft { List[AnyTuple]() } { (b, a) =>
try { f(a); b } catch {
case e: Exception => (a, e) :: b
}
}
logging.debug("FOB foreach() res=" + res)
DistResult(no, List('ok), res.reverse.toList, serial)
case DistExists(l, f) =>
val res = l.foldLeft { (false, List[AnyTuple]()) } { (b, a) =>
try { (f(a) || b._1, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB exists() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case DistFilter(l, f) =>
val res = l.foldLeft { (List[Any](), List[AnyTuple]()) } { (b, a) =>
try { ({ if (f(a)) a :: b._1 else b._1}, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB filter() res=" + res)
DistResult(no, res._1.reverse.toList, res._2.reverse.toList, serial) // reverse
case DistForall(l, f) =>
val res = l.foldLeft { (true, List[AnyTuple]()) } { (b, a) =>
try { (f(a) && b._1, b._2) } catch {
case e: Exception => (false, (a, e) :: b._2) // TODO: Is this right?
}
}
logging.debug("FOB forall() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case DistCount(l, f) =>
val res = l.foldLeft { (0, List[AnyTuple]()) } { (b, a) =>
try { (b._1 + { if (f(a)) 1 else 0 }, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB count() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
}
} catch {
case e: Exception =>
logging.warn("unreacheable?")
DistFailed(no, e, serial)
}
}
/*
def process(no: Int, op: DistRequest, serial: Long, auth: AAA) = {
type AnyTuple = (Any, Any)
try {
op match {
case DistMap(l, f) =>
//val g = (a: Any) => try { Some(f(a)) } catch { case e: Exception => None }
val r = l.par.map(f)
logging.debug("map " + r)
DistResult(no, r.toList, serial)
case DistReduceLeft(l, f, _) =>
//val g1 = (b: Any, a: Any) => try { f(b, a) } catch { case e: Exception => b }
val r = l.par.reduceLeft(f)
logging.debug("reduceLeft " + r)
DistResult(no, List(r), serial)
case DistReduceRight(l, f, _) =>
//val g2= (a: Any, b: Any) => try { f(a, b) } catch { case e: Exception => b }
val r = l.par.reduceRight(f)
logging.debug("reduceRight " + r)
DistResult(no, List(r), serial)
case DistForeach(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => }
l.par.foreach(f)
logging.debug("foreach")
DistResult(no, List('ok), serial)
case DistExists(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.exists(f)
logging.debug("exists " + r)
DistResult(no, List(r), serial)
case DistFilter(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.filter(f)
logging.debug("filter " + r)
DistResult(no, r.toList, serial)
case DistForall(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.forall(f)
logging.debug("forall " + r)
DistResult(no, List(r), serial)
case DistCount(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.count(f)
logging.debug("count " + r)
DistResult(no, List(r), serial)
}
} catch {
case e: Exception => DistFailed(no, e, serial)
}
}
class WorkerActor(no: Int, op: DistRequest, serial: Long, aaa: AAA) extends Actor {
def act() {
//implicit def auth: AAA = { aaa }
//alive(port)
//register(Symbol(WorkerName + wid.toString), self)
}
}*/
//protected def op[A](block: => A) = try { block } catch { case e: Exception => throw e }
def shutdown() {
worker ! DistExit('normal)
}
def gc(members: List[PV]) {
if (worker != null) {
val nm = members.map(m => (m.pm.getHostName, m.pm.getPort))
val failed = masters.diff(nm)
if (!failed.isEmpty) {
for (Node(host, port) <- failed) worker ! DistRemoveURL(new URL("http://" + host + ":" + port + "/"))
}
}
}
}
| axi-sugiki/kumoi | src/kumoi/core/mr/DWorker2.scala | Scala | apache-2.0 | 9,634 |
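`DWorker2.process()` above repeatedly uses a fold that applies the user function to every element while collecting the inputs that threw, rather than failing the whole task. A self-contained sketch of that pattern (standalone names, not the kumoi API):

```scala
object CollectingFoldSketch {
  // Apply f to each element; return the successful results and the (input, exception)
  // pairs separately, both in input order, mirroring the foldLeft/reverse idiom above.
  def mapCollectingFailures[A, B](xs: List[A])(f: A => B): (List[B], List[(A, Exception)]) = {
    val (ok, failed) = xs.foldLeft((List.empty[B], List.empty[(A, Exception)])) { (acc, a) =>
      try { (f(a) :: acc._1, acc._2) }
      catch { case e: Exception => (acc._1, (a, e) :: acc._2) }
    }
    (ok.reverse, failed.reverse)
  }

  def main(args: Array[String]): Unit = {
    val (parsed, errors) = mapCollectingFailures(List("1", "x", "3"))(_.toInt)
    println(parsed)           // List(1, 3)
    println(errors.map(_._1)) // List(x)
  }
}
```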
package com.microsoft.chgeuer
object TrackingSample {
def data() : Array[(Long, Double, Double)] = {
val normal : Long = 1000
val longDelay : Long = 3000
Array[(Long, Double, Double)](
(normal, 48.80175, 2.637859),
(normal, 48.8018, 2.637719),
(normal, 48.80186, 2.637579),
(normal, 48.80192, 2.637439),
(longDelay, 48.80197, 2.6373),
(normal, 48.802032, 2.63716),
(normal, 48.802094, 2.63702),
(normal, 48.802143, 2.63687),
(normal, 48.802193, 2.63672),
(normal, 48.802242, 2.63658),
(longDelay, 48.80229, 2.63644),
(normal, 48.80233, 2.63633),
(normal, 48.80235, 2.63624),
(normal, 48.80237, 2.63618),
(normal, 48.80238, 2.63615),
(normal, 48.8024, 2.63611),
(normal, 48.80241, 2.63606),
(normal, 48.80243, 2.635999),
(normal, 48.80246, 2.635919),
(normal, 48.80249, 2.635829),
(normal, 48.80253, 2.63573),
(normal, 48.80256, 2.63563),
(normal, 48.802597, 2.63551),
(normal, 48.802635, 2.63538),
(normal, 48.802673, 2.63525),
(normal, 48.802723, 2.63511),
(normal, 48.80276, 2.63497),
(normal, 48.80281, 2.63482),
(normal, 48.80286, 2.63468),
(longDelay, 48.80291, 2.63453),
(normal, 48.802967, 2.63438),
(normal, 48.803005, 2.63423),
(normal, 48.803055, 2.63408),
(normal, 48.803104, 2.63394),
(normal, 48.803143, 2.63379),
(normal, 48.80318, 2.63365),
(normal, 48.80321, 2.63352),
(normal, 48.80325, 2.63342),
(normal, 48.80331, 2.63335),
(normal, 48.80338, 2.63333),
(normal, 48.80346, 2.63334),
(normal, 48.80354, 2.63337),
(normal, 48.80362, 2.6334),
(normal, 48.80371, 2.63343),
(normal, 48.80379, 2.63345),
(normal, 48.803875, 2.633469),
(normal, 48.803967, 2.633499),
(normal, 48.804047, 2.633519),
(normal, 48.804127, 2.633549),
(normal, 48.804195, 2.633549),
(normal, 48.804264, 2.633529),
(normal, 48.804325, 2.633469),
(normal, 48.804356, 2.633379),
(normal, 48.804375, 2.633279),
(normal, 48.804356, 2.633169),
(normal, 48.804325, 2.63308),
(normal, 48.804276, 2.63298),
(normal, 48.804237, 2.63287),
(normal, 48.804188, 2.63276),
(normal, 48.80415, 2.63264),
(normal, 48.804134, 2.632519),
(normal, 48.804104, 2.63238),
(normal, 48.804092, 2.63223),
(normal, 48.80408, 2.63209),
(normal, 48.80408, 2.63194),
(normal, 48.80407, 2.63178),
(normal, 48.804058, 2.63163),
(normal, 48.804047, 2.63147),
(normal, 48.804035, 2.63132),
(normal, 48.804035, 2.63116),
(normal, 48.804024, 2.631),
(normal, 48.804012, 2.63084),
(normal, 48.804, 2.63069),
(normal, 48.804, 2.63054),
(normal, 48.80399, 2.63041),
(normal, 48.803963, 2.630291),
(normal, 48.803925, 2.630191),
(normal, 48.803875, 2.630111),
(normal, 48.803825, 2.630051),
(normal, 48.803764, 2.630011),
(normal, 48.803703, 2.629961),
(normal, 48.803642, 2.629911),
(normal, 48.803574, 2.629861),
(normal, 48.803505, 2.629811),
(normal, 48.803425, 2.62975),
(normal, 48.803345, 2.62968),
(normal, 48.803265, 2.62962),
(normal, 48.803173, 2.62955),
(normal, 48.803093, 2.62949),
(normal, 48.803, 2.62942),
(normal, 48.80293, 2.62936),
(normal, 48.802837, 2.62932),
(normal, 48.80274, 2.6293),
(normal, 48.802647, 2.6293),
(normal, 48.802547, 2.62932),
(normal, 48.802456, 2.62937),
(normal, 48.802364, 2.62945),
(normal, 48.802284, 2.62956),
(normal, 48.802223, 2.62969),
(normal, 48.802174, 2.62983),
(normal, 48.802113, 2.62999),
(normal, 48.802063, 2.63015),
(normal, 48.802002, 2.63031),
(normal, 48.801952, 2.63047),
(normal, 48.80189, 2.63064),
(normal, 48.82953, 2.711079),
(normal, 48.82951, 2.711489),
(normal, 48.82948, 2.711919),
(normal, 48.82946, 2.712339),
(normal, 48.82943, 2.712759),
(normal, 48.82941, 2.713179),
(normal, 48.82938, 2.713599),
(normal, 48.82936, 2.714019),
(normal, 48.82934, 2.714449),
(normal, 48.82931, 2.71488),
(normal, 48.82928, 2.71531),
(normal, 48.82926, 2.71573),
(normal, 48.82923, 2.71616),
(normal, 48.829212, 2.71659),
(normal, 48.82918, 2.71702),
(normal, 48.829155, 2.717449),
(normal, 48.829124, 2.717869),
(normal, 48.829094, 2.718299),
(normal, 48.829075, 2.718719),
(normal, 48.829044, 2.719139),
(normal, 48.829025, 2.719559),
(normal, 48.829006, 2.719979),
(normal, 48.828976, 2.720399),
(normal, 48.828957, 2.720819),
(normal, 48.828926, 2.72123),
(normal, 48.828907, 2.72165),
(normal, 48.828876, 2.72206),
(normal, 48.828857, 2.72248),
(normal, 48.82884, 2.7229),
(normal, 48.82882, 2.72332),
(normal, 48.828808, 2.723739),
(normal, 48.82879, 2.724159),
(normal, 48.828777, 2.724589),
(normal, 48.82876, 2.725019),
(normal, 48.828747, 2.725449),
(normal, 48.828728, 2.725879),
(normal, 48.828716, 2.72631),
(normal, 48.828705, 2.726749),
(normal, 48.828693, 2.72718),
(normal, 48.828682, 2.72761),
(normal, 48.828682, 2.72805),
(normal, 48.82867, 2.72848),
(normal, 48.82867, 2.72891),
(normal, 48.82867, 2.72934),
(normal, 48.82867, 2.72977),
(normal, 48.82867, 2.730189),
(normal, 48.82867, 2.730619),
(normal, 48.828682, 2.73104),
(normal, 48.828693, 2.73147),
(normal, 48.828693, 2.73189),
(normal, 48.828712, 2.73232),
(normal, 48.828724, 2.73275),
(normal, 48.828743, 2.73319),
(normal, 48.828762, 2.73362),
(normal, 48.82878, 2.73405),
(normal, 48.8288, 2.73449),
(normal, 48.82883, 2.73492),
(normal, 48.82885, 2.73536),
(normal, 48.82888, 2.73579),
(normal, 48.82891, 2.73623),
(normal, 48.828926, 2.736659),
(normal, 48.828957, 2.737089),
(normal, 48.828987, 2.737529),
(normal, 48.829018, 2.737959),
(normal, 48.82905, 2.738389),
(normal, 48.82908, 2.738819),
(normal, 48.82911, 2.739249),
(normal, 48.82914, 2.739679),
(normal, 48.829178, 2.740109),
(normal, 48.82921, 2.74054),
(normal, 48.82924, 2.74097),
(normal, 48.82927, 2.7414),
(normal, 48.8293, 2.74183),
(normal, 48.829338, 2.74226),
(normal, 48.82937, 2.74268),
(normal, 48.829403, 2.743109),
(normal, 48.82944, 2.743529),
(normal, 48.82947, 2.743959),
(normal, 48.829502, 2.744389),
(normal, 48.82954, 2.744809),
(normal, 48.82957, 2.745229),
(normal, 48.8296, 2.745659),
(normal, 48.82963, 2.746079),
(normal, 48.829662, 2.7465),
(normal, 48.829693, 2.74693),
(normal, 48.829723, 2.74735),
(normal, 48.829754, 2.74777),
(normal, 48.829792, 2.74819),
(normal, 48.829823, 2.74861),
(normal, 48.82986, 2.74903),
(normal, 48.82989, 2.749449),
(normal, 48.82992, 2.749869),
(normal, 48.829952, 2.750289),
(normal, 48.829983, 2.750719),
(normal, 48.830013, 2.751139),
(normal, 48.830044, 2.751569),
(normal, 48.830074, 2.751999),
(normal, 48.830112, 2.752419),
(normal, 48.830143, 2.75285),
(normal, 48.830173, 2.75328),
(normal, 48.83021, 2.75371),
(normal, 48.830242, 2.75414),
(normal, 48.83028, 2.75457),
(normal, 48.83031, 2.755),
(normal, 48.83034, 2.75544),
(normal, 48.830376, 2.755871),
(normal, 48.830414, 2.756301),
(normal, 48.830444, 2.756741),
(normal, 48.830475, 2.757181),
(normal, 48.830505, 2.757621),
(normal, 48.830536, 2.758061),
(normal, 48.830566, 2.758501),
(normal, 48.830605, 2.75894),
(normal, 48.830635, 2.75938),
(normal, 48.830673, 2.75982),
(normal, 48.830704, 2.76026),
(normal, 48.83074, 2.7607),
(normal, 48.830772, 2.76113),
(normal, 48.83081, 2.76156),
(normal, 48.83084, 2.762),
(normal, 48.83087, 2.762429),
(normal, 48.83091, 2.762859),
(normal, 48.83094, 2.763289),
(normal, 48.83097, 2.763709),
(normal, 48.831, 2.764139),
(normal, 48.83103, 2.764569),
(normal, 48.83107, 2.764999),
(normal, 48.8311, 2.765419),
(normal, 48.83113, 2.765849),
(normal, 48.83116, 2.766269),
(normal, 48.8312, 2.7667),
(normal, 48.83123, 2.76712),
(normal, 48.83126, 2.76755),
(normal, 48.83129, 2.76797),
(normal, 48.83132, 2.76839),
(normal, 48.83135, 2.768809),
(normal, 48.83138, 2.769229),
(normal, 48.831417, 2.769649),
(normal, 48.831448, 2.770069),
(normal, 48.83148, 2.770499),
(normal, 48.83151, 2.770919),
(normal, 48.83154, 2.771339),
(normal, 48.83157, 2.771759),
(normal, 48.8316, 2.772189),
(normal, 48.83163, 2.772609),
(normal, 48.83167, 2.77304),
(normal, 48.8317, 2.77346),
(normal, 48.83173, 2.77388),
(normal, 48.83176, 2.7743),
(normal, 48.8318, 2.77472),
(normal, 48.831833, 2.775139),
(normal, 48.831863, 2.775569),
(normal, 48.8319, 2.775989),
(normal, 48.831932, 2.776419),
(normal, 48.831963, 2.776839),
(normal, 48.832, 2.777269),
(normal, 48.83203, 2.777699),
(normal, 48.83207, 2.77813),
(normal, 48.8321, 2.77856),
(normal, 48.83213, 2.77899),
(normal, 48.83216, 2.77942),
(normal, 48.83219, 2.77985),
(normal, 48.832222, 2.78028),
(normal, 48.83226, 2.78072),
(normal, 48.83229, 2.78115),
(normal, 48.83232, 2.78159),
(normal, 48.83235, 2.78202),
(normal, 48.832382, 2.78246),
(normal, 48.83242, 2.78289),
(normal, 48.83245, 2.78333),
(normal, 48.83248, 2.78376),
(normal, 48.83251, 2.7842),
(normal, 48.83255, 2.78463),
(normal, 48.83258, 2.78507),
(normal, 48.83261, 2.78551),
(normal, 48.83265, 2.78594),
(normal, 48.83268, 2.78638),
(normal, 48.832718, 2.78681),
(normal, 48.83275, 2.78724),
(normal, 48.83278, 2.78768),
(normal, 48.83281, 2.788109),
(normal, 48.83284, 2.788539),
(normal, 48.83288, 2.788979),
(normal, 48.83291, 2.789409),
(normal, 48.83294, 2.789839),
(normal, 48.83297, 2.790269),
(normal, 48.833, 2.790709),
(normal, 48.83303, 2.791139),
(normal, 48.83307, 2.791569),
(normal, 48.8331, 2.792),
(normal, 48.83313, 2.79243),
(normal, 48.83316, 2.79285),
(normal, 48.8332, 2.79328),
(normal, 48.83323, 2.7937),
(normal, 48.83326, 2.79412),
(normal, 48.833305, 2.794539),
(normal, 48.833336, 2.794959),
(normal, 48.833366, 2.795379),
(normal, 48.833416, 2.795789),
(normal, 48.833454, 2.796209),
(normal, 48.833492, 2.796619),
(normal, 48.83353, 2.797029),
(normal, 48.83358, 2.79744),
(normal, 48.83363, 2.79785),
(normal, 48.83368, 2.79825),
(normal, 48.83373, 2.79865),
(normal, 48.83378, 2.79905),
(normal, 48.833828, 2.79944),
(normal, 48.833878, 2.79983),
(normal, 48.83394, 2.80021),
(normal, 48.833992, 2.800599),
(normal, 48.834053, 2.800989),
(normal, 48.83412, 2.801369),
(normal, 48.834183, 2.801759),
(normal, 48.83425, 2.80215),
(normal, 48.83432, 2.80253),
(normal, 48.83439, 2.80292),
(normal, 48.83447, 2.80331),
(normal, 48.834538, 2.80369),
(normal, 48.834618, 2.80408),
(normal, 48.834698, 2.80446),
(normal, 48.834778, 2.80484),
(normal, 48.834858, 2.80522),
(normal, 48.83495, 2.8056),
(normal, 48.83503, 2.80598),
(normal, 48.835125, 2.80636),
(normal, 48.835217, 2.80675),
(normal, 48.835316, 2.80714),
(normal, 48.835415, 2.80753),
(normal, 48.835514, 2.80792),
(normal, 48.835613, 2.8083),
(normal, 48.835712, 2.80869),
(normal, 48.835823, 2.80908),
(normal, 48.835922, 2.80947),
(normal, 48.836033, 2.80984),
(normal, 48.836132, 2.81022),
(normal, 48.83623, 2.81059),
(normal, 48.83633, 2.81096),
(normal, 48.83643, 2.81133),
(normal, 48.83653, 2.8117),
(normal, 48.836624, 2.81207),
(normal, 48.836716, 2.81244),
(normal, 48.836807, 2.8128),
(normal, 48.8369, 2.81317),
(normal, 48.83698, 2.81354),
(normal, 48.83707, 2.8139),
(normal, 48.83715, 2.81427),
(normal, 48.837242, 2.81462),
(normal, 48.837322, 2.81498),
(normal, 48.837414, 2.81533),
(normal, 48.837494, 2.81568),
(normal, 48.837574, 2.81602),
(normal, 48.837643, 2.81637),
(normal, 48.837704, 2.81672),
(normal, 48.83774, 2.81707),
(normal, 48.83777, 2.81743),
(normal, 48.837788, 2.81778),
(normal, 48.8378, 2.81814),
(normal, 48.83781, 2.81849),
(normal, 48.837822, 2.81883),
(normal, 48.83784, 2.81917),
(normal, 48.83787, 2.8195),
(normal, 48.83791, 2.81982),
(normal, 48.83796, 2.82013),
(normal, 48.83804, 2.82042),
(normal, 48.83813, 2.82071),
(normal, 48.83824, 2.82098),
(normal, 48.83837, 2.82123),
(normal, 48.8385, 2.82146),
(normal, 48.83863, 2.82167),
(normal, 48.83882, 2.82186),
(normal, 48.839012, 2.82202),
(normal, 48.83921, 2.82215),
(normal, 48.83941, 2.82225),
(normal, 48.83962, 2.82232),
(normal, 48.83983, 2.82237),
(normal, 48.84005, 2.82239),
(normal, 48.84027, 2.82237),
(normal, 48.840492, 2.82232),
(normal, 48.840702, 2.82223),
(normal, 48.8409, 2.82211),
(normal, 48.8411, 2.82196),
(normal, 48.841278, 2.82178),
(normal, 48.84145, 2.82158),
(normal, 48.84161, 2.82135),
(normal, 48.84176, 2.82109),
(normal, 48.84189, 2.82081),
(normal, 48.842, 2.82051),
(normal, 48.84209, 2.8202),
(normal, 48.84215, 2.81987),
(normal, 48.8422, 2.81953),
(normal, 48.84224, 2.81919),
(normal, 48.842278, 2.81884),
(normal, 48.842316, 2.81848),
(normal, 48.842354, 2.81814),
(normal, 48.842403, 2.81779),
(normal, 48.842472, 2.81746),
(normal, 48.84254, 2.81713),
(normal, 48.84262, 2.81681),
(normal, 48.84272, 2.8165),
(normal, 48.842827, 2.8162),
(normal, 48.842937, 2.81592),
(normal, 48.843067, 2.81565),
(normal, 48.843185, 2.8154),
(normal, 48.843327, 2.81516),
(normal, 48.843468, 2.81494),
(normal, 48.843616, 2.81472),
(normal, 48.843765, 2.81452),
(normal, 48.843914, 2.81433),
(normal, 48.844063, 2.81414),
(normal, 48.84421, 2.81395),
(normal, 48.844353, 2.81376),
(normal, 48.8445, 2.81357),
(normal, 48.844643, 2.81338),
(normal, 48.84479, 2.81319),
(normal, 48.84493, 2.813001),
(normal, 48.845078, 2.812811),
(normal, 48.845238, 2.812611),
(normal, 48.845387, 2.812401),
(normal, 48.84556, 2.812191),
(normal, 48.84572, 2.81198),
(normal, 48.84589, 2.81176),
(normal, 48.84606, 2.81154),
(normal, 48.846233, 2.81131),
(normal, 48.846405, 2.81109),
(normal, 48.846577, 2.81087),
(normal, 48.84675, 2.81065),
(normal, 48.84691, 2.81044),
(normal, 48.84708, 2.81023),
(normal, 48.84724, 2.81002),
(normal, 48.84741, 2.80981),
(normal, 48.847557, 2.8096),
(normal, 48.847717, 2.8094),
(normal, 48.847878, 2.8092),
(normal, 48.848026, 2.809),
(normal, 48.848186, 2.8088),
(normal, 48.848335, 2.80861),
(normal, 48.848484, 2.80841),
(normal, 48.848633, 2.80821),
(normal, 48.84878, 2.80801),
(normal, 48.84893, 2.80782),
(normal, 48.84907, 2.80763),
(normal, 48.84923, 2.80743),
(normal, 48.84938, 2.80724),
(normal, 48.84953, 2.80705),
(normal, 48.84969, 2.80685),
(normal, 48.84984, 2.80665),
(normal, 48.85, 2.80645),
(normal, 48.85016, 2.80625),
(normal, 48.85032, 2.80605),
(normal, 48.85048, 2.80584),
(normal, 48.85064, 2.80564),
(normal, 48.8508, 2.80543),
(normal, 48.85096, 2.80522),
(normal, 48.85112, 2.80502),
(normal, 48.85128, 2.80481),
(normal, 48.85144, 2.8046),
(normal, 48.8516, 2.8044),
(normal, 48.85175, 2.8042),
(normal, 48.85191, 2.804),
(normal, 48.85205, 2.80381),
(normal, 48.8522, 2.80363),
(normal, 48.85233, 2.80346),
(normal, 48.852478, 2.80331),
(normal, 48.85262, 2.80317),
(normal, 48.85278, 2.80306),
(normal, 48.85293, 2.80296),
(normal, 48.85309, 2.80289),
(normal, 48.85325, 2.80286),
(normal, 48.85341, 2.80285),
(normal, 48.85357, 2.80287),
(normal, 48.853718, 2.80289),
(normal, 48.85386, 2.80292),
(normal, 48.854, 2.80294),
(normal, 48.85414, 2.80295),
(normal, 48.85427, 2.80294),
(normal, 48.8544, 2.8029),
(normal, 48.85452, 2.80283),
(normal, 48.854637, 2.80274),
(normal, 48.854736, 2.80263),
(normal, 48.854828, 2.80252),
(normal, 48.854908, 2.8024),
(normal, 48.85499, 2.80228),
(normal, 48.855057, 2.80218),
(normal, 48.855118, 2.80209),
(normal, 48.85518, 2.80202),
(normal, 48.85524, 2.80197),
(normal, 48.8553, 2.80195),
(normal, 48.855362, 2.80196),
(normal, 48.85541, 2.80198),
(normal, 48.855453, 2.802031),
(normal, 48.855503, 2.802101),
(normal, 48.855564, 2.802171),
(normal, 48.855625, 2.802241),
(normal, 48.855686, 2.802321),
(normal, 48.855747, 2.802411),
(normal, 48.855816, 2.802501),
(normal, 48.855896, 2.802591),
(normal, 48.855988, 2.802691),
(normal, 48.856087, 2.8028),
(normal, 48.856186, 2.80291),
(normal, 48.856297, 2.80304),
(normal, 48.856407, 2.80317),
(normal, 48.856518, 2.8033),
(normal, 48.85663, 2.80344),
(normal, 48.85676, 2.803581),
(normal, 48.856876, 2.803721),
(normal, 48.857006, 2.803851),
(normal, 48.857136, 2.803991),
(normal, 48.857265, 2.804121),
(normal, 48.857395, 2.804251),
(normal, 48.857536, 2.804381),
(normal, 48.857677, 2.804511),
(normal, 48.85782, 2.804631),
(normal, 48.857967, 2.80475),
(normal, 48.858116, 2.80487),
(normal, 48.858265, 2.80498),
(normal, 48.858414, 2.80509),
(normal, 48.858562, 2.80519),
(normal, 48.85871, 2.80529),
(normal, 48.858875, 2.805392),
(normal, 48.859035, 2.805491),
(normal, 48.859184, 2.805591),
(normal, 48.859333, 2.805701),
(normal, 48.859493, 2.805801),
(normal, 48.859642, 2.805901),
(normal, 48.859802, 2.806001),
(normal, 48.85995, 2.806101),
(normal, 48.86011, 2.806201),
(normal, 48.86027, 2.806291),
(normal, 48.86042, 2.806381),
(normal, 48.86058, 2.80647),
(normal, 48.86073, 2.80657),
(normal, 48.86089, 2.80666),
(normal, 48.86104, 2.80675),
(normal, 48.86119, 2.806829),
(normal, 48.86134, 2.806909),
(normal, 48.8615, 2.806969),
(normal, 48.86165, 2.807029),
(normal, 48.861797, 2.807089),
(normal, 48.861946, 2.807149),
(normal, 48.862106, 2.807199),
(normal, 48.862255, 2.80726),
(normal, 48.862415, 2.80731),
(normal, 48.862576, 2.80734),
(normal, 48.862736, 2.80737),
(normal, 48.862907, 2.8074),
(normal, 48.863068, 2.80744),
(normal, 48.86324, 2.80747),
(normal, 48.8634, 2.80751),
(normal, 48.863564, 2.80754),
(normal, 48.863724, 2.80757),
(normal, 48.863884, 2.8076),
(normal, 48.864044, 2.80762),
(normal, 48.864204, 2.80764),
(normal, 48.864365, 2.80766),
(normal, 48.864525, 2.80768),
(normal, 48.864685, 2.80769),
(normal, 48.864845, 2.8077),
(normal, 48.865017, 2.8077),
(normal, 48.865177, 2.80771),
(normal, 48.86535, 2.80771),
(normal, 48.86551, 2.80771),
(normal, 48.86568, 2.80771),
(normal, 48.86584, 2.80771),
(normal, 48.86601, 2.80771),
(normal, 48.86617, 2.80771),
(normal, 48.866318, 2.80771),
(normal, 48.866467, 2.8077),
(normal, 48.866596, 2.80769),
(normal, 48.866714, 2.80768),
(normal, 48.866814, 2.80767),
(normal, 48.866905, 2.80769),
(normal, 48.866974, 2.80774),
(normal, 48.867012, 2.80783),
(normal, 48.86703, 2.80793),
(normal, 48.867043, 2.80806),
(normal, 48.86703, 2.80819),
(normal, 48.86702, 2.80833),
(normal, 48.867, 2.80848),
(normal, 48.866974, 2.80863),
(normal, 48.866955, 2.80879),
(normal, 48.866936, 2.80894),
(normal, 48.866924, 2.8091),
(normal, 48.866905, 2.80926),
(normal, 48.866886, 2.80942),
(normal, 48.866867, 2.80958),
(normal, 48.866848, 2.80974),
(normal, 48.86683, 2.8099),
(normal, 48.86681, 2.81005),
(normal, 48.86679, 2.81019),
(normal, 48.86677, 2.8103),
(normal, 48.86676, 2.8104),
(normal, 48.86675, 2.81048),
(normal, 48.86673, 2.81058),
(normal, 48.866726, 2.81069),
(normal, 48.866714, 2.8108),
(normal, 48.866703, 2.810899),
(normal, 48.866684, 2.810999),
(normal, 48.866665, 2.811099),
(normal, 48.866634, 2.811179),
(normal, 48.866634, 2.811249),
(normal, 48.866653, 2.81131),
(normal, 48.86669, 2.81136),
(normal, 48.866753, 2.81141),
(normal, 48.866814, 2.81143),
(normal, 48.866882, 2.81145),
(normal, 48.866943, 2.81148),
(normal, 48.866993, 2.81152),
(normal, 48.86703, 2.81159),
(normal, 48.867046, 2.811671),
(normal, 48.867046, 2.811771),
(normal, 48.867058, 2.811871),
(normal, 48.867077, 2.81199),
(normal, 48.86709, 2.8121),
(normal, 48.867107, 2.81223),
(normal, 48.867126, 2.81235),
(normal, 48.867146, 2.81248),
(normal, 48.867157, 2.8126),
(normal, 48.867176, 2.81272),
(normal, 48.867195, 2.81283),
(normal, 48.867207, 2.81293),
(normal, 48.867226, 2.81302),
(normal, 48.867237, 2.8131),
(normal, 48.86725, 2.81317),
(normal, 48.86726, 2.813241),
(normal, 48.86727, 2.813321),
(normal, 48.867283, 2.813401),
(normal, 48.867302, 2.813491),
(normal, 48.867313, 2.813581),
(normal, 48.867332, 2.81368),
(normal, 48.86735, 2.81378),
(normal, 48.867363, 2.81389),
(normal, 48.867382, 2.81402),
(normal, 48.8674, 2.81417),
(normal, 48.86742, 2.81433),
(normal, 48.86743, 2.81448),
(normal, 48.86745, 2.81465),
(normal, 48.86748, 2.81481),
(normal, 48.8675, 2.81498),
(normal, 48.867523, 2.81515),
(normal, 48.867542, 2.81532),
(normal, 48.86756, 2.81549),
(normal, 48.86758, 2.81566),
(normal, 48.86759, 2.81583),
(normal, 48.86759, 2.816),
(normal, 48.867603, 2.81617),
(normal, 48.86759, 2.81634),
(normal, 48.86759, 2.81651),
(normal, 48.86758, 2.81668),
(normal, 48.86756, 2.81683),
(normal, 48.86753, 2.81695),
(normal, 48.8675, 2.81705),
(normal, 48.86748, 2.81713),
(normal, 48.86745, 2.8172),
(normal, 48.86743, 2.81727),
(normal, 48.867413, 2.81733),
(normal, 48.867413, 2.81741),
(normal, 48.867443, 2.81747),
(normal, 48.867493, 2.81753),
(normal, 48.867554, 2.8176),
(normal, 48.867634, 2.817681),
(normal, 48.867714, 2.817761),
(normal, 48.867813, 2.817861),
(normal, 48.867924, 2.817961),
(normal, 48.868042, 2.81806),
(normal, 48.86816, 2.81818),
(normal, 48.86829, 2.8183),
(normal, 48.86842, 2.81842),
(normal, 48.86855, 2.81855),
(normal, 48.868687, 2.818671),
(normal, 48.868828, 2.818801),
(normal, 48.86897, 2.818931),
(normal, 48.86911, 2.819061),
(normal, 48.86925, 2.81918),
(normal, 48.8694, 2.81931),
(normal, 48.86953, 2.81944),
(normal, 48.86967, 2.81957),
(normal, 48.869812, 2.81969),
(normal, 48.869953, 2.81982),
(normal, 48.870102, 2.81995),
(normal, 48.87023, 2.82007),
(normal, 48.870342, 2.82017),
(normal, 48.87044, 2.82026),
(normal, 48.87051, 2.82032),
(normal, 48.870564, 2.82037),
(normal, 48.8706, 2.8204),
(normal, 48.870632, 2.82043),
(normal, 48.870644, 2.82044),
(normal, 48.870655, 2.82045),
(normal, 48.870655, 2.82045),
(normal, 48.870655, 2.82045),
(normal, 48.870655, 2.82045),
(normal, 48.870655, 2.82045),
(normal, 48.870655, 2.82045),
(normal, 48.870667, 2.82047),
(normal, 48.870705, 2.8205),
(normal, 48.870743, 2.82054),
(normal, 48.87081, 2.82059),
(normal, 48.87088, 2.82065),
(normal, 48.87095, 2.820731),
(normal, 48.871048, 2.820811),
(normal, 48.87114, 2.820891),
(normal, 48.87125, 2.820991),
(normal, 48.87136, 2.821091),
(normal, 48.87148, 2.821201),
(normal, 48.871597, 2.821311),
(normal, 48.87174, 2.82143),
(normal, 48.87187, 2.82155),
(normal, 48.872017, 2.82167),
(normal, 48.872158, 2.8218),
(normal, 48.87232, 2.82193),
(normal, 48.872467, 2.82205),
(normal, 48.872627, 2.82217),
(normal, 48.8728, 2.82227),
(normal, 48.87295, 2.822381),
(normal, 48.873123, 2.822481),
(normal, 48.873295, 2.822581),
(normal, 48.873466, 2.822681),
(normal, 48.87364, 2.822781),
(normal, 48.873817, 2.822881),
(normal, 48.87399, 2.822981),
(normal, 48.87416, 2.823081),
(normal, 48.874332, 2.823181),
(normal, 48.87451, 2.823271),
(normal, 48.874683, 2.82337),
(normal, 48.874855, 2.82347),
(normal, 48.875027, 2.82357),
(normal, 48.8752, 2.82368),
(normal, 48.87537, 2.82379),
(normal, 48.875534, 2.82392),
(normal, 48.875694, 2.82406),
(normal, 48.875843, 2.82421),
(normal, 48.87599, 2.82436),
(normal, 48.876133, 2.82451),
(normal, 48.87628, 2.82465),
(normal, 48.876423, 2.8248),
(normal, 48.87657, 2.82495),
(normal, 48.876713, 2.8251),
(normal, 48.87686, 2.82524),
(normal, 48.877003, 2.8254),
(normal, 48.87715, 2.82555),
(normal, 48.877293, 2.8257),
(normal, 48.87744, 2.82586),
(normal, 48.87759, 2.82601),
(normal, 48.877743, 2.82617),
(normal, 48.87789, 2.82633),
(normal, 48.87805, 2.82649),
(normal, 48.8782, 2.82665),
(normal, 48.87835, 2.82681),
(normal, 48.87851, 2.82697),
(normal, 48.87867, 2.82713),
(normal, 48.87882, 2.8273),
(normal, 48.87898, 2.82746),
(normal, 48.87914, 2.82762),
(normal, 48.879288, 2.82778),
(normal, 48.879448, 2.82792),
(normal, 48.87961, 2.82805),
(normal, 48.87978, 2.82816),
(normal, 48.87994, 2.82825),
(normal, 48.880108, 2.82833),
(normal, 48.88028, 2.8284),
(normal, 48.88044, 2.82846),
(normal, 48.88059, 2.82851),
(normal, 48.88072, 2.82854),
(normal, 48.880836, 2.82854),
(normal, 48.880947, 2.82853),
(normal, 48.881058, 2.82851),
(normal, 48.88117, 2.8285),
(normal, 48.88128, 2.82849),
(normal, 48.88139, 2.82848),
(normal, 48.8815, 2.82849),
(normal, 48.8816, 2.82849),
(normal, 48.8817, 2.8285),
(normal, 48.88181, 2.82852),
(normal, 48.88191, 2.82854),
(normal, 48.882008, 2.82855),
(normal, 48.882107, 2.82857),
(normal, 48.882217, 2.82858),
(normal, 48.882328, 2.8286),
(normal, 48.882427, 2.82862),
(normal, 48.882538, 2.82863),
(normal, 48.88265, 2.82864),
(normal, 48.88276, 2.82864),
(normal, 48.88287, 2.82864),
(normal, 48.88297, 2.82863),
(normal, 48.88308, 2.82863),
(normal, 48.88318, 2.82863),
(normal, 48.88329, 2.82862),
(normal, 48.88338, 2.82862),
(normal, 48.88348, 2.82861),
(normal, 48.88357, 2.82861),
(normal, 48.88365, 2.8286),
(normal, 48.88372, 2.8286),
(normal, 48.88377, 2.8286),
(normal, 48.88381, 2.8286),
(normal, 48.88384, 2.82859),
(normal, 48.883858, 2.82859),
(normal, 48.88387, 2.82859),
(normal, 48.88389, 2.82859),
(normal, 48.883907, 2.82859),
(normal, 48.883907, 2.82859),
(normal, 48.88392, 2.82859),
(normal, 48.88392, 2.82858),
(normal, 48.88392, 2.82858),
(normal, 48.883938, 2.82858),
(normal, 48.883957, 2.82858),
(normal, 48.883987, 2.82858),
(normal, 48.884037, 2.82858),
(normal, 48.884087, 2.82857),
(normal, 48.884148, 2.82858),
(normal, 48.884197, 2.8286),
(normal, 48.884235, 2.82865),
(normal, 48.884266, 2.82872),
(normal, 48.884285, 2.82882),
(normal, 48.884304, 2.82893),
(normal, 48.884323, 2.82904),
(normal, 48.884342, 2.82917),
(normal, 48.88436, 2.82931),
(normal, 48.88438, 2.82946),
(normal, 48.884415, 2.82961),
(normal, 48.884434, 2.82976),
(normal, 48.884453, 2.82992),
(normal, 48.88447, 2.83009),
(normal, 48.884502, 2.83026),
(normal, 48.88452, 2.83043),
(normal, 48.88454, 2.8306),
(normal, 48.88456, 2.83078),
(normal, 48.88459, 2.83095),
(normal, 48.88461, 2.83113),
(normal, 48.88463, 2.83129),
(normal, 48.88464, 2.83145),
(normal, 48.88466, 2.83158),
(normal, 48.884678, 2.83168),
(normal, 48.88469, 2.83177),
(normal, 48.884705, 2.83184),
(normal, 48.884716, 2.83189),
(normal, 48.884727, 2.83193),
(normal, 48.884727, 2.83194),
(normal, 48.884727, 2.83195),
(normal, 48.884727, 2.83195),
(normal, 48.884727, 2.83195),
(normal, 48.884727, 2.83194),
(normal, 48.884727, 2.83194),
(normal, 48.884727, 2.83194),
(normal, 48.88474, 2.83194),
(normal, 48.88474, 2.83194),
(normal, 48.88474, 2.83194),
(normal, 48.88474, 2.83194),
(normal, 48.88474, 2.83194),
(normal, 48.88963, 2.82775),
(normal, 48.88966, 2.82781),
(normal, 48.88969, 2.82786),
(normal, 48.88972, 2.82788),
(normal, 48.88975, 2.82788),
(normal, 48.88979, 2.82785),
(normal, 48.88982, 2.82781),
(normal, 48.88986, 2.82775),
(normal, 48.889908, 2.82767),
(normal, 48.889957, 2.82759),
(normal, 48.890007, 2.82751),
(normal, 48.890068, 2.82742),
(normal, 48.89013, 2.82733),
(normal, 48.89019, 2.82723),
(normal, 48.89025, 2.82714),
(normal, 48.8903, 2.82706),
(normal, 48.89036, 2.82697),
(normal, 48.8904, 2.8269),
(normal, 48.89045, 2.82684),
(normal, 48.890488, 2.82679),
(normal, 48.89052, 2.82676),
(normal, 48.89055, 2.82673),
(normal, 48.890568, 2.82671),
(normal, 48.890587, 2.8267),
(normal, 48.8906, 2.82669),
(normal, 48.8906, 2.82669),
(normal, 48.8906, 2.82669),
(normal, 48.8906, 2.82669),
(normal, 48.8906, 2.82669),
(normal, 48.8906, 2.82669),
(normal, 48.89059, 2.82669),
(normal, 48.89059, 2.82669),
(normal, 48.89059, 2.82669),
(normal, 48.89059, 2.82669),
(normal, 48.890602, 2.82668),
(normal, 48.890614, 2.82668),
(normal, 48.890614, 2.82667),
(normal, 48.890625, 2.82667),
(normal, 48.890636, 2.82666),
(normal, 48.890648, 2.82665),
(normal, 48.890648, 2.82665),
(normal, 48.89066, 2.82665),
(normal, 48.89066, 2.82664),
(normal, 48.89067, 2.82664),
(normal, 48.89067, 2.82664),
(normal, 48.89068, 2.82663),
(normal, 48.89069, 2.82663),
(normal, 48.89071, 2.82662),
(normal, 48.89074, 2.8266),
(normal, 48.89077, 2.82658),
(normal, 48.89082, 2.82656),
(normal, 48.89087, 2.82653),
(normal, 48.89093, 2.82649),
(normal, 48.89099, 2.82645),
(normal, 48.891052, 2.82639),
(normal, 48.891113, 2.82631),
(normal, 48.891163, 2.82622),
(normal, 48.891212, 2.82612),
(normal, 48.89125, 2.82601),
(normal, 48.89129, 2.82589),
(normal, 48.891335, 2.825769),
(normal, 48.891384, 2.825639),
(normal, 48.891445, 2.825509),
(normal, 48.891506, 2.825369),
(normal, 48.891556, 2.825219),
(normal, 48.891617, 2.825089),
(normal, 48.891678, 2.82495),
(normal, 48.891747, 2.82482),
(normal, 48.891815, 2.82469),
(normal, 48.891895, 2.82456),
(normal, 48.891987, 2.82443),
(normal, 48.892067, 2.8243),
(normal, 48.89216, 2.82416),
(normal, 48.89225, 2.82402),
(normal, 48.89234, 2.82388),
(normal, 48.89244, 2.823739),
(normal, 48.892532, 2.823609),
(normal, 48.89263, 2.823469),
(normal, 48.89273, 2.823339),
(normal, 48.89283, 2.823209),
(normal, 48.89293, 2.823079),
(normal, 48.89303, 2.822949),
(normal, 48.89312, 2.822819),
(normal, 48.89322, 2.82269),
(normal, 48.89332, 2.82255),
(normal, 48.89341, 2.82242),
(normal, 48.89351, 2.82229),
(normal, 48.89361, 2.82216),
(normal, 48.893707, 2.82203),
(normal, 48.8938, 2.8219),
(normal, 48.893898, 2.821779),
(normal, 48.89399, 2.821649),
(normal, 48.89407, 2.821529),
(normal, 48.89416, 2.821419),
(normal, 48.89424, 2.821309),
(normal, 48.89432, 2.821189),
(normal, 48.8944, 2.821069),
(normal, 48.894463, 2.82094),
(normal, 48.89453, 2.82081),
(normal, 48.894592, 2.82068),
(normal, 48.894653, 2.82055),
(normal, 48.894703, 2.82043),
(normal, 48.89474, 2.8203),
(normal, 48.89479, 2.82019),
(normal, 48.89482, 2.82009),
(normal, 48.89485, 2.820011),
(normal, 48.89487, 2.819941),
(normal, 48.89489, 2.81987),
(normal, 48.89491, 2.81983),
(normal, 48.89492, 2.81979),
(normal, 48.89494, 2.81976),
(normal, 48.89495, 2.81973),
(normal, 48.894962, 2.81969),
(normal, 48.894974, 2.81966),
(normal, 48.894985, 2.81964),
(normal, 48.894997, 2.81961),
(normal, 48.895008, 2.81959),
(normal, 48.895008, 2.81958),
(normal, 48.89502, 2.81957),
(normal, 48.89502, 2.81956),
(normal, 48.89503, 2.81955),
(normal, 48.89503, 2.81954),
(normal, 48.89503, 2.81954),
(normal, 48.89503, 2.81954),
(normal, 48.89503, 2.81953),
(normal, 48.89503, 2.81953),
(normal, 48.89503, 2.81953),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89503, 2.81952),
(normal, 48.89761, 2.79033),
(normal, 48.8977, 2.79022),
(normal, 48.898346, 2.78902),
(normal, 48.898376, 2.7889),
(normal, 48.898415, 2.78877),
(normal, 48.898464, 2.78863),
(normal, 48.898502, 2.78848),
(normal, 48.898552, 2.78833),
(normal, 48.8986, 2.78818),
(normal, 48.89865, 2.78802),
(normal, 48.8987, 2.78785),
(normal, 48.89875, 2.78768),
(normal, 48.8988, 2.78751),
(normal, 48.89885, 2.78732),
(normal, 48.8989, 2.78713),
(normal, 48.89895, 2.78693),
(normal, 48.89901, 2.78672),
(normal, 48.899063, 2.786521),
(normal, 48.899124, 2.786311),
(normal, 48.899174, 2.786101),
(normal, 48.899235, 2.785891),
(normal, 48.899284, 2.785671),
(normal, 48.899345, 2.785461),
(normal, 48.899395, 2.785241),
(normal, 48.899456, 2.785021),
(normal, 48.899506, 2.784801),
(normal, 48.899555, 2.78457),
(normal, 48.899593, 2.78434),
(normal, 48.899643, 2.78411),
(normal, 48.89968, 2.78388),
(normal, 48.89973, 2.78366),
(normal, 48.89978, 2.78343),
(normal, 48.899834, 2.7832),
(normal, 48.899902, 2.78298),
(normal, 48.89997, 2.78277),
(normal, 48.90004, 2.78257),
(normal, 48.90013, 2.78237),
(normal, 48.900223, 2.78218),
(normal, 48.900322, 2.782),
(normal, 48.90042, 2.78182),
(normal, 48.90053, 2.78165),
(normal, 48.900642, 2.78148),
(normal, 48.90076, 2.78131),
(normal, 48.90089, 2.78115),
(normal, 48.90102, 2.78099),
(normal, 48.90116, 2.78084),
(normal, 48.90129, 2.78068),
(normal, 48.901436, 2.780509),
(normal, 48.901566, 2.780339),
(normal, 48.901695, 2.780199),
(normal, 48.901836, 2.780049),
(normal, 48.901966, 2.779899),
(normal, 48.902096, 2.779759),
(normal, 48.902225, 2.779609),
(normal, 48.902344, 2.779469),
(normal, 48.902462, 2.77933),
(normal, 48.90259, 2.77919),
(normal, 48.90271, 2.77906),
(normal, 48.90282, 2.77893),
(normal, 48.90293, 2.7788),
(normal, 48.90305, 2.77869),
(normal, 48.90316, 2.77857),
(normal, 48.904175, 2.77804),
(normal, 48.904213, 2.77803),
(normal, 48.904243, 2.77802),
(normal, 48.904274, 2.77802),
(normal, 48.904312, 2.77801),
(normal, 48.90436, 2.778),
(normal, 48.90443, 2.77799),
(normal, 48.9045, 2.77798),
(normal, 48.90459, 2.77797),
(normal, 48.90467, 2.77796),
(normal, 48.90477, 2.77795),
(normal, 48.90486, 2.77795),
(normal, 48.90496, 2.77795),
(normal, 48.905052, 2.77794),
(normal, 48.90515, 2.77793),
(normal, 48.905243, 2.777939),
(normal, 48.90674, 2.77852),
(normal, 48.90685, 2.77858),
(normal, 48.906948, 2.77864),
(normal, 48.90706, 2.77869),
(normal, 48.90717, 2.77875),
(normal, 48.90728, 2.77881),
(normal, 48.90739, 2.77887),
(normal, 48.90749, 2.77894),
(normal, 48.9076, 2.779),
(normal, 48.9077, 2.77908),
(normal, 48.90781, 2.77916),
(normal, 48.9079, 2.77925),
(normal, 48.908, 2.77935),
(normal, 48.9081, 2.77946),
(normal, 48.9082, 2.77956),
(normal, 48.908302, 2.779671),
(normal, 48.9084, 2.779761),
(normal, 48.908493, 2.779861),
(normal, 48.908585, 2.779961),
(normal, 48.908665, 2.780071),
(normal, 48.908745, 2.78018),
(normal, 48.908825, 2.78029),
(normal, 48.908905, 2.78039),
(normal, 48.908974, 2.78049),
(normal, 48.909035, 2.78058),
(normal, 48.909084, 2.78064),
(normal, 48.909134, 2.78066),
(normal, 48.909184, 2.78065),
(normal, 48.909233, 2.78058),
(normal, 48.90927, 2.7805),
(normal, 48.909313, 2.78039),
(normal, 48.909363, 2.78026),
(normal, 48.909424, 2.78013),
(normal, 48.909485, 2.77998),
(normal, 48.909546, 2.77982),
(normal, 48.909607, 2.77967),
(normal, 48.909657, 2.77953),
(normal, 48.909706, 2.77942),
(normal, 48.909737, 2.77935),
(normal, 48.909756, 2.77931),
(normal, 48.909767, 2.77926),
(normal, 48.909798, 2.77919),
(normal, 48.90983, 2.77911),
(normal, 48.90986, 2.77901),
(normal, 48.90991, 2.7789),
(normal, 48.90995, 2.778761),
(normal, 48.91, 2.778621),
(normal, 48.91007, 2.778461),
(normal, 48.91013, 2.778281),
(normal, 48.9102, 2.778101),
(normal, 48.91028, 2.777901),
(normal, 48.91036, 2.777701),
(normal, 48.91044, 2.777481),
(normal, 48.91053, 2.777261),
(normal, 48.91062, 2.77703),
(normal, 48.910713, 2.7768),
(normal, 48.910805, 2.77658),
(normal, 48.910896, 2.77635),
(normal, 48.910988, 2.77611),
(normal, 48.91108, 2.77588),
(normal, 48.932888, 2.72815),
(normal, 48.93308, 2.72814),
(normal, 48.933258, 2.72811),
(normal, 48.93345, 2.72809),
(normal, 48.933628, 2.72806),
(normal, 48.93382, 2.72804),
(normal, 48.93401, 2.72801),
(normal, 48.9342, 2.72799),
(normal, 48.93439, 2.72796),
(normal, 48.93457, 2.72794),
(normal, 48.93475, 2.72792),
(normal, 48.93494, 2.72791),
(normal, 48.93512, 2.72789),
(normal, 48.9353, 2.72788),
(normal, 48.93549, 2.72786),
(normal, 48.93568, 2.727851),
(normal, 48.93586, 2.727841),
(normal, 48.93604, 2.727821),
(normal, 48.93622, 2.727811),
(normal, 48.936398, 2.727791),
(normal, 48.936577, 2.727771),
(normal, 48.936756, 2.727751),
(normal, 48.936935, 2.72772),
(normal, 48.937126, 2.72768),
(normal, 48.937305, 2.72764),
(normal, 48.937485, 2.72759),
(normal, 48.937664, 2.72754),
(normal, 48.937843, 2.72747),
(normal, 48.938023, 2.7274),
(normal, 48.9382, 2.72732),
(normal, 48.93839, 2.727229),
(normal, 48.93857, 2.727139),
(normal, 48.938747, 2.727039),
(normal, 48.938927, 2.726939),
(normal, 48.939106, 2.726839),
(normal, 48.939278, 2.726739),
(normal, 48.939438, 2.726639),
(normal, 48.93961, 2.726539),
(normal, 48.93977, 2.726439),
(normal, 48.93993, 2.72635),
(normal, 48.94009, 2.72625),
(normal, 48.94026, 2.72615),
(normal, 48.940422, 2.72605),
(normal, 48.940582, 2.72596),
(normal, 48.94073, 2.72586),
(normal, 48.940884, 2.72576),
(normal, 48.941032, 2.72568),
(normal, 48.941162, 2.7256),
(normal, 48.94128, 2.72553),
(normal, 48.9414, 2.72548),
(normal, 48.94151, 2.72545),
(normal, 48.94161, 2.72545),
(normal, 48.9417, 2.72547),
(normal, 48.94178, 2.7255),
(normal, 48.94185, 2.72552),
(normal, 48.94193, 2.7255),
(normal, 48.94199, 2.72545),
(normal, 48.94204, 2.72535),
(normal, 48.94209, 2.72524),
(normal, 48.94215, 2.72512),
(normal, 48.94224, 2.725019),
(normal, 48.94234, 2.724939),
(normal, 48.94246, 2.724849),
(normal, 48.94259, 2.724769),
(normal, 48.94273, 2.724679),
(normal, 48.94288, 2.724589),
(normal, 48.943027, 2.724499),
(normal, 48.943188, 2.724399),
(normal, 48.943348, 2.724299),
(normal, 48.94352, 2.7242),
(normal, 48.94369, 2.72409),
(normal, 48.94387, 2.72399),
(normal, 48.944042, 2.72388),
(normal, 48.94422, 2.72377),
(normal, 48.9444, 2.72366),
(normal, 48.944576, 2.723549),
(normal, 48.944756, 2.723429),
(normal, 48.944935, 2.723319),
(normal, 48.945107, 2.723219),
(normal, 48.94528, 2.723109),
(normal, 48.94545, 2.723009),
(normal, 48.94562, 2.722899),
(normal, 48.945793, 2.722799),
(normal, 48.945965, 2.722699),
(normal, 48.946136, 2.722589),
(normal, 48.946316, 2.72249),
(normal, 48.946476, 2.72239),
(normal, 48.946648, 2.72228),
(normal, 48.94682, 2.72218),
(normal, 48.94698, 2.72208),
(normal, 48.947147, 2.72199),
(normal, 48.947296, 2.72189),
(normal, 48.947445, 2.72181),
(normal, 48.947586, 2.72173),
(normal, 48.947704, 2.72166),
(normal, 48.947815, 2.72158),
(normal, 48.947914, 2.72148),
(normal, 48.947994, 2.72136),
(normal, 48.948063, 2.72122),
(normal, 48.9481, 2.72107),
(normal, 48.94812, 2.72091),
(normal, 48.94812, 2.72075),
(normal, 48.94809, 2.7206),
(normal, 48.94804, 2.72047),
(normal, 48.94796, 2.72035),
(normal, 48.947876, 2.72024),
(normal, 48.947777, 2.72013),
(normal, 48.947678, 2.72001),
(normal, 48.94758, 2.71988),
(normal, 48.94748, 2.71975),
(normal, 48.947388, 2.71962),
(normal, 48.947308, 2.71947),
(normal, 48.947247, 2.7193),
(normal, 48.947197, 2.71911),
(normal, 48.947166, 2.71891),
(normal, 48.947147, 2.71868),
(normal, 48.94713, 2.71845),
(normal, 48.94711, 2.71819),
(normal, 48.94709, 2.71791),
(normal, 48.94707, 2.71761),
(normal, 48.94704, 2.7173),
(normal, 48.94702, 2.71697),
(normal, 48.947002, 2.71663),
(normal, 48.946983, 2.71628),
(normal, 48.946964, 2.71593),
(normal, 48.946945, 2.71557),
(normal, 48.946926, 2.71522),
(normal, 48.946907, 2.71487),
(normal, 48.946888, 2.71451),
(normal, 48.946877, 2.71416),
(normal, 48.946857, 2.7138),
(normal, 48.94684, 2.71344),
(normal, 48.946827, 2.71309),
(normal, 48.946808, 2.71273),
(normal, 48.94679, 2.71238),
(normal, 48.946774, 2.71202),
(normal, 48.946754, 2.71167),
(normal, 48.946743, 2.71131),
(normal, 48.946724, 2.71095),
(normal, 48.946705, 2.71059),
(normal, 48.946693, 2.71023),
(normal, 48.946674, 2.70987),
(normal, 48.946655, 2.70951),
(normal, 48.946636, 2.70915),
(normal, 48.946617, 2.70879),
(normal, 48.946606, 2.70843),
(normal, 48.946587, 2.70806),
(normal, 48.946568, 2.70768),
(normal, 48.94655, 2.70731),
(normal, 48.94653, 2.70693),
(normal, 48.94652, 2.70656),
(normal, 48.946503, 2.70618),
(normal, 48.94649, 2.7058),
(normal, 48.94648, 2.70542),
(normal, 48.94648, 2.70504),
(normal, 48.94648, 2.70467),
(normal, 48.94649, 2.70429),
(normal, 48.94651, 2.70391),
(normal, 48.94654, 2.70354),
(normal, 48.94657, 2.70317),
(normal, 48.9466, 2.70281),
(normal, 48.94664, 2.70246),
(normal, 48.94669, 2.70211),
(normal, 48.94674, 2.70177),
(normal, 48.94679, 2.70144),
(normal, 48.946854, 2.701119),
(normal, 48.946903, 2.700809),
(normal, 48.946964, 2.7005),
(normal, 48.947014, 2.70019),
(normal, 48.947063, 2.69989),
(normal, 48.947124, 2.6996),
(normal, 48.947174, 2.69932),
(normal, 48.947224, 2.69905),
(normal, 48.94726, 2.69878),
(normal, 48.94731, 2.69851),
(normal, 48.94736, 2.69825),
(normal, 48.94741, 2.69799),
(normal, 48.94746, 2.69773),
(normal, 48.94751, 2.69746),
(normal, 48.94756, 2.69719),
(normal, 48.948425, 2.69215),
(normal, 48.948486, 2.69181),
(normal, 48.948547, 2.69147),
(normal, 48.94861, 2.69114),
(normal, 48.948677, 2.69081),
(normal, 48.94874, 2.69048),
(normal, 48.9488, 2.69015),
(normal, 48.94886, 2.68981),
(normal, 48.9489, 2.68946),
(normal, 48.948936, 2.68911),
(normal, 48.948967, 2.68876),
(normal, 48.948997, 2.68841),
(normal, 48.94901, 2.68805),
(normal, 48.94901, 2.6877),
(normal, 48.94901, 2.68736),
(normal, 48.94892, 2.685689),
(normal, 48.948883, 2.685369),
(normal, 48.948833, 2.68505),
(normal, 48.948784, 2.68474),
(normal, 48.948723, 2.68442),
(normal, 48.94866, 2.68411),
(normal, 48.948593, 2.6838),
(normal, 48.948513, 2.68348),
(normal, 48.94842, 2.68316),
(normal, 48.94833, 2.68284),
(normal, 48.94823, 2.68253),
(normal, 48.948112, 2.68221),
(normal, 48.948, 2.68191),
(normal, 48.947884, 2.68161),
(normal, 48.947754, 2.68131),
(normal, 48.947624, 2.68103),
(normal, 48.947483, 2.68075),
(normal, 48.947334, 2.68047),
(normal, 48.947186, 2.680201),
(normal, 48.947025, 2.67994),
(normal, 48.946877, 2.67968),
(normal, 48.946716, 2.67941),
(normal, 48.946556, 2.67915),
(normal, 48.946407, 2.67889),
(normal, 48.94626, 2.67863),
(normal, 48.94611, 2.67838),
(normal, 48.94597, 2.678119),
(normal, 48.945827, 2.677859),
(normal, 48.945698, 2.677589),
(normal, 48.945568, 2.677319),
(normal, 48.94545, 2.677039),
(normal, 48.94533, 2.676759),
(normal, 48.94522, 2.676479),
(normal, 48.94511, 2.676189),
(normal, 48.94501, 2.675909),
(normal, 48.94492, 2.675629),
(normal, 48.944828, 2.675359),
(normal, 48.944748, 2.67508),
(normal, 48.944668, 2.67481),
(normal, 48.9446, 2.67454),
(normal, 48.94453, 2.67427),
(normal, 48.944458, 2.673989),
(normal, 48.944397, 2.673709),
(normal, 48.944336, 2.673419),
(normal, 48.944286, 2.673129),
(normal, 48.944237, 2.672839),
(normal, 48.9442, 2.672539),
(normal, 48.94416, 2.672239),
(normal, 48.94413, 2.671929),
(normal, 48.9441, 2.671619),
(normal, 48.94408, 2.671309),
(normal, 48.94406, 2.67101),
(normal, 48.94405, 2.67071),
(normal, 48.94403, 2.67042),
(normal, 48.94402, 2.67014),
(normal, 48.944, 2.66987),
(normal, 48.943985, 2.66961),
(normal, 48.943966, 2.66936),
(normal, 48.943954, 2.66912),
(normal, 48.943943, 2.6689),
(normal, 48.943954, 2.66872),
(normal, 48.943974, 2.66856),
(normal, 48.944023, 2.66844),
(normal, 48.94409, 2.66835),
(normal, 48.944183, 2.66829),
(normal, 48.944275, 2.66825),
(normal, 48.944366, 2.66822),
(normal, 48.944458, 2.66819),
(normal, 48.94454, 2.66813),
(normal, 48.9446, 2.66805),
(normal, 48.94466, 2.66795),
(normal, 48.94615, 2.66794),
(normal, 48.946243, 2.66799),
(normal, 48.946342, 2.66803),
(normal, 48.946434, 2.66806),
(normal, 48.946533, 2.66806),
(normal, 48.946625, 2.66803),
(normal, 48.946705, 2.66797),
(normal, 48.946774, 2.66787),
(normal, 48.946842, 2.66776),
(normal, 48.946922, 2.66767),
(normal, 48.947014, 2.6676),
(normal, 48.947113, 2.66757),
(normal, 48.947224, 2.66754),
(normal, 48.947342, 2.66751),
(normal, 48.94747, 2.66748),
(normal, 48.947594, 2.667461),
(normal, 48.947723, 2.667431),
(normal, 48.947865, 2.667401),
(normal, 48.948006, 2.667361),
(normal, 48.948154, 2.667331),
(normal, 48.948315, 2.66729),
(normal, 48.948486, 2.66725),
(normal, 48.948658, 2.66721),
(normal, 48.94883, 2.66717),
(normal, 48.94901, 2.66713),
(normal, 48.94918, 2.66709),
(normal, 48.94936, 2.66705),
(normal, 48.94953, 2.66701),
(normal, 48.94971, 2.66697),
(normal, 48.94989, 2.66693),
(normal, 48.950058, 2.666881),
(normal, 48.95023, 2.666841),
(normal, 48.95041, 2.666801),
(normal, 48.95059, 2.666761),
(normal, 48.95076, 2.666721),
(normal, 48.95094, 2.666681),
(normal, 48.95111, 2.666631),
(normal, 48.951283, 2.66659),
(normal, 48.95146, 2.66655),
(normal, 48.951633, 2.66651),
(normal, 48.951805, 2.66647),
(normal, 48.951977, 2.66642),
(normal, 48.952156, 2.66638),
(normal, 48.952328, 2.66635),
(normal, 48.9525, 2.66631),
(normal, 48.952663, 2.66627),
(normal, 48.952835, 2.66623),
(normal, 48.952995, 2.66619),
(normal, 48.953156, 2.66615),
(normal, 48.953304, 2.66611),
(normal, 48.953453, 2.66607),
(normal, 48.953594, 2.66604),
(normal, 48.953724, 2.66601),
(normal, 48.953842, 2.66599),
(normal, 48.95394, 2.66597),
(normal, 48.95402, 2.66596),
(normal, 48.954082, 2.66596),
(normal, 48.95412, 2.66596),
(normal, 48.95414, 2.66596),
(normal, 48.95415, 2.66596),
(normal, 48.954147, 2.66596),
(normal, 48.954147, 2.66596),
(normal, 48.95416, 2.66596),
(normal, 48.95416, 2.66596),
(normal, 48.95419, 2.66597),
(normal, 48.95422, 2.66599),
(normal, 48.95427, 2.66602),
(normal, 48.95433, 2.66604),
(normal, 48.955048, 2.665681),
(normal, 48.955177, 2.665651),
(normal, 48.955307, 2.665611),
(normal, 48.955456, 2.665581),
(normal, 48.955605, 2.66555),
(normal, 48.955753, 2.66551),
(normal, 48.955902, 2.66548),
(normal, 48.95605, 2.66544),
(normal, 48.9562, 2.66541),
(normal, 48.95635, 2.66537),
(normal, 48.95649, 2.66533),
(normal, 48.95664, 2.6653),
(normal, 48.95678, 2.66526),
(normal, 48.95692, 2.66523),
(normal, 48.95706, 2.66519),
(normal, 48.957207, 2.665161),
(normal, 48.957348, 2.665121),
(normal, 48.957508, 2.665091),
(normal, 48.95767, 2.665051),
(normal, 48.95783, 2.66502),
(normal, 48.95799, 2.66498),
(normal, 48.95816, 2.66494),
(normal, 48.958332, 2.6649),
(normal, 48.95851, 2.66486),
(normal, 48.95869, 2.66482),
(normal, 48.95887, 2.66477),
(normal, 48.95905, 2.66473),
(normal, 48.95924, 2.66468),
(normal, 48.95942, 2.66464),
(normal, 48.95961, 2.66459),
(normal, 48.95981, 2.664551),
(normal, 48.96, 2.664501),
(normal, 48.96019, 2.664451),
(normal, 48.96039, 2.664401),
(normal, 48.960587, 2.664361),
(normal, 48.960785, 2.664311),
(normal, 48.960983, 2.664261),
(normal, 48.96118, 2.664211),
(normal, 48.96139, 2.664171),
(normal, 48.96159, 2.66412),
(normal, 48.9618, 2.66407),
(normal, 48.96201, 2.66402),
(normal, 48.96223, 2.66396),
(normal, 48.96244, 2.66391),
(normal, 48.96266, 2.66386),
(normal, 48.96287, 2.663811),
(normal, 48.963093, 2.663761),
(normal, 48.963314, 2.663701),
(normal, 48.963535, 2.663651),
(normal, 48.963757, 2.663591),
(normal, 48.963978, 2.663541),
(normal, 48.9642, 2.663481),
(normal, 48.96442, 2.66343),
(normal, 48.96463, 2.66338),
(normal, 48.96485, 2.66333),
(normal, 48.96506, 2.66328),
(normal, 48.96527, 2.66323),
(normal, 48.96548, 2.66318),
(normal, 48.96569, 2.66313),
(normal, 48.96589, 2.66308),
(normal, 48.966087, 2.66302),
(normal, 48.966278, 2.66297),
(normal, 48.96647, 2.66291),
(normal, 48.96665, 2.66285),
(normal, 48.96682, 2.66279),
(normal, 48.96697, 2.66274),
(normal, 48.967117, 2.66268),
(normal, 48.967247, 2.66264),
(normal, 48.967358, 2.66261),
(normal, 48.96745, 2.6626),
(normal, 48.967518, 2.66264),
(normal, 48.96758, 2.6627),
(normal, 48.96764, 2.66277),
(normal, 48.96771, 2.66285),
(normal, 48.96779, 2.6629),
(normal, 48.967865, 2.66293),
(normal, 48.967957, 2.66291),
(normal, 48.96805, 2.66285),
(normal, 48.968117, 2.66276),
(normal, 48.968197, 2.662651),
(normal, 48.968277, 2.66257),
(normal, 48.968388, 2.6625),
(normal, 48.9685, 2.66246),
(normal, 48.968628, 2.66243),
(normal, 48.96877, 2.66239),
(normal, 48.968918, 2.66235),
(normal, 48.969067, 2.66231),
(normal, 48.96924, 2.66227),
(normal, 48.96941, 2.66223),
(normal, 48.96958, 2.66219),
(normal, 48.96976, 2.662151),
(normal, 48.96994, 2.662101),
(normal, 48.97012, 2.662061),
(normal, 48.9703, 2.662011),
(normal, 48.97049, 2.661971),
(normal, 48.97068, 2.661921),
(normal, 48.97086, 2.66188),
(normal, 48.97105, 2.66184),
(normal, 48.97124, 2.66179),
(normal, 48.97143, 2.66175),
(normal, 48.971622, 2.6617),
(normal, 48.971813, 2.66166),
(normal, 48.97201, 2.66161),
(normal, 48.972202, 2.66157),
(normal, 48.9724, 2.66152),
(normal, 48.972588, 2.661471),
(normal, 48.972786, 2.661431),
(normal, 48.972984, 2.661381),
(normal, 48.973175, 2.661331),
(normal, 48.973373, 2.661281),
(normal, 48.973564, 2.661231),
(normal, 48.973755, 2.661191),
(normal, 48.973934, 2.66114),
(normal, 48.974125, 2.6611),
(normal, 48.974304, 2.66106),
(normal, 48.974476, 2.66102),
(normal, 48.974648, 2.66097),
(normal, 48.97482, 2.66093),
(normal, 48.97499, 2.66089),
(normal, 48.97515, 2.66085),
(normal, 48.975315, 2.66082),
(normal, 48.975475, 2.66078),
(normal, 48.975647, 2.66074),
(normal, 48.975796, 2.6607),
(normal, 48.975956, 2.66066),
(normal, 48.976105, 2.66063),
(normal, 48.976254, 2.66059),
(normal, 48.976395, 2.66056),
(normal, 48.976524, 2.66052),
(normal, 48.976665, 2.66049),
(normal, 48.976795, 2.66046),
(normal, 48.976913, 2.66043),
(normal, 48.97703, 2.66041),
(normal, 48.977142, 2.66039),
(normal, 48.97724, 2.66038),
(normal, 48.977333, 2.66039),
(normal, 48.977425, 2.66041),
(normal, 48.977505, 2.66046),
(normal, 48.977585, 2.660501),
(normal, 48.977654, 2.660521),
(normal, 48.977715, 2.660501),
(normal, 48.977776, 2.66045),
(normal, 48.977837, 2.66037),
(normal, 48.977898, 2.66029),
(normal, 48.977978, 2.66023),
(normal, 48.97807, 2.66019),
(normal, 48.97817, 2.66015),
(normal, 48.97828, 2.66012),
(normal, 48.97839, 2.66009),
(normal, 48.97852, 2.66005),
(normal, 48.978645, 2.660021),
(normal, 48.978786, 2.659991),
(normal, 48.978928, 2.659951),
(normal, 48.979076, 2.659921),
(normal, 48.979237, 2.659881),
(normal, 48.979397, 2.659841),
(normal, 48.979557, 2.659801),
(normal, 48.979717, 2.65976),
(normal, 48.97989, 2.65972),
(normal, 48.98006, 2.65968),
(normal, 48.980232, 2.65964),
(normal, 48.980404, 2.6596),
(normal, 48.980583, 2.65956),
(normal, 48.980762, 2.65951),
(normal, 48.98094, 2.65947),
(normal, 48.98113, 2.659421),
(normal, 48.98132, 2.659371),
(normal, 48.98151, 2.659331),
(normal, 48.9817, 2.659281),
(normal, 48.98189, 2.659241),
(normal, 48.982082, 2.659191),
(normal, 48.98226, 2.659151),
(normal, 48.982452, 2.659101),
(normal, 48.98263, 2.65906),
(normal, 48.982822, 2.65901),
(normal, 48.983, 2.65897),
(normal, 48.98318, 2.65892),
(normal, 48.98336, 2.65888),
(normal, 48.98354, 2.65884),
(normal, 48.98373, 2.65879),
(normal, 48.983906, 2.658751),
(normal, 48.984097, 2.658701),
(normal, 48.984287, 2.658661),
(normal, 48.984478, 2.658611),
(normal, 48.984676, 2.658571),
(normal, 48.984867, 2.658511),
(normal, 48.985058, 2.658461),
(normal, 48.98525, 2.658421),
(normal, 48.985447, 2.65837),
(normal, 48.985638, 2.65832),
(normal, 48.98583, 2.65827),
(normal, 48.986027, 2.65823),
(normal, 48.986217, 2.65818),
(normal, 48.98641, 2.65813),
(normal, 48.9866, 2.65808),
(normal, 48.986786, 2.658041),
(normal, 48.986965, 2.658001),
(normal, 48.987156, 2.657961),
(normal, 48.987347, 2.657921),
(normal, 48.987526, 2.657871),
(normal, 48.987717, 2.65782),
(normal, 48.987907, 2.65778),
(normal, 48.9881, 2.65773),
(normal, 48.98829, 2.65769),
(normal, 48.988487, 2.65764),
(normal, 48.988678, 2.6576),
(normal, 48.98887, 2.65755),
(normal, 48.98906, 2.6575),
(normal, 48.98925, 2.65746),
(normal, 48.98945, 2.6574),
(normal, 48.989643, 2.65734),
(normal, 48.989834, 2.657269),
(normal, 48.990025, 2.657189),
(normal, 48.990215, 2.657109),
(normal, 48.990406, 2.657009),
(normal, 48.990585, 2.656909),
(normal, 48.990765, 2.65679),
(normal, 48.990936, 2.65668)
)
}
} | DXTEDStreamHack2017/nrt17 | containers/flink/master/src/main/scala/com/microsoft/chgeuer/TrackingSample.scala | Scala | apache-2.0 | 60,011 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2015 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Affero General Public License for more details.
 * You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
 * Section 5 of the GNU Affero General Public License.
*
 * In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.core.ui.operation
import java.util.UUID
import java.util.concurrent.CancellationException
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.definition.Operation
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.core.ui.UI
import org.digimead.tabuddy.desktop.core.ui.operation.api.XOperationViewSelect
import org.eclipse.core.runtime.{ IAdaptable, IProgressMonitor }
import org.digimead.tabuddy.desktop.core.ui.block.WindowSupervisor
/** 'Select view' operation. */
class OperationViewSelect extends XOperationViewSelect with XLoggable {
/** Akka execution context. */
implicit lazy val ec = App.system.dispatcher
/** Akka communication timeout. */
implicit val timeout = akka.util.Timeout(UI.communicationTimeout)
/**
* Select view.
*
* @param viewId Specific view Id that will be selected
*/
def apply(viewId: UUID): Option[UUID] = {
log.info(s"Select view %s[%08X].".format(viewId, viewId.hashCode()))
UI.viewMap.get(viewId) match {
case Some(vComposite) ⇒
App.execNGet {
if (!vComposite.isDisposed()) {
Option(vComposite.getShell().getData(UI.swtId)) match {
case Some(windowId) ⇒
WindowSupervisor ! App.Message.Start((windowId, vComposite), None)
Some(viewId)
case None ⇒
log.fatal("Unable to find window Id for view %s[%08X].".format(viewId, viewId.hashCode()))
None
}
} else {
log.info("Unable to select disposed view with Id " + viewId)
None
}
}
case None ⇒
log.info("Unable to find view with Id " + viewId)
None
}
}
/**
* Create 'Select view' operation.
*
* @param viewId Specific view Id that will be selected
* @return 'Select view' operation
*/
def operation(viewId: UUID) =
    new Implementation(viewId)
/**
* Checks that this class can be subclassed.
* <p>
* The API class is intended to be subclassed only at specific,
   * controlled points. This method enforces this rule
* unless it is overridden.
* </p><p>
* <em>IMPORTANT:</em> By providing an implementation of this
* method that allows a subclass of a class which does not
* normally allow subclassing to be created, the implementer
* agrees to be fully responsible for the fact that any such
* subclass will likely fail.
* </p>
*/
override protected def checkSubclass() {}
  class Implementation(viewId: UUID)
extends OperationViewSelect.Abstract(viewId) with XLoggable {
@volatile protected var allowExecute = true
override def canExecute() = allowExecute
override def canRedo() = false
override def canUndo() = false
protected def execute(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[UUID] =
try Operation.Result.OK(OperationViewSelect.this(viewId))
catch { case e: CancellationException ⇒ Operation.Result.Cancel() }
protected def redo(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[UUID] =
throw new UnsupportedOperationException
protected def undo(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[UUID] =
throw new UnsupportedOperationException
}
}
object OperationViewSelect extends XLoggable {
/** Stable identifier with OperationViewSelect DI */
lazy val operation = DI.operation.asInstanceOf[OperationViewSelect]
/**
* Build a new 'Select view' operation.
*
* @param viewId Specific view Id that will be selected
* @return 'Select view' operation
*/
@log
def apply(viewId: UUID): Option[Abstract] =
Some(operation.operation(viewId))
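  // Hedged usage sketch, not part of the original sources: obtaining the operation from this
  // companion. How the returned Operation is scheduled (job wrapper, operation history) is
  // project specific and intentionally not shown here.
  //
  //   OperationViewSelect(viewId) match {
  //     case Some(operation) ⇒ // hand `operation` to the application's operation/job runner
  //     case None ⇒ log.fatal("OperationViewSelect implementation is unavailable.")
  //   }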
/** Bridge between abstract XOperation[UUID] and concrete Operation[UUID] */
abstract class Abstract(val viewId: UUID)
extends Operation[UUID](s"Select view %S[%08X].".format(viewId.hashCode(), viewId.hashCode())) {
this: XLoggable ⇒
}
/**
* Dependency injection routines.
*/
private object DI extends XDependencyInjection.PersistentInjectable {
lazy val operation = injectOptional[XOperationViewSelect] getOrElse new OperationViewSelect
}
}
| digimead/digi-TABuddy-desktop | part-core-ui/src/main/scala/org/digimead/tabuddy/desktop/core/ui/operation/OperationViewSelect.scala | Scala | agpl-3.0 | 6,598 |
package scala.meta.tests.tokenizers
import java.nio.charset.StandardCharsets
import scala.meta.internal.io.InputStreamIO
import scala.meta.testkit.DiffAssertions
import org.scalameta.logger
class UnicodeEscapeSuite extends BaseTokenizerSuite with DiffAssertions {
// Read tests from external file because scalac processes string literals in source
// @ """ s"${x}\\uef17" """.length
// res0: Int = 10
  // by reading from external file escapes like `\uef17` are represented
// as 6 characters instead of one.
val tests = new String(
InputStreamIO.readBytes(this.getClass.getClassLoader.getResourceAsStream("unicode.txt")),
StandardCharsets.UTF_8)
// asserts that tokenize(code).syntax == code
def checkRoundtrip(original: String): Unit = {
test(logger.revealWhitespace(original)) {
val tokens = tokenize(original)
val obtained = tokens.mkString
assertNoDiff(obtained, original)
}
}
tests.linesIterator.foreach { line =>
checkRoundtrip(line)
}
}
| xeno-by/scalameta | tests/jvm/src/test/scala/scala/meta/tests/tokenizers/UnicodeEscapeSuite.scala | Scala | bsd-3-clause | 1,007 |
package io.scrapeyard
import org.jvnet.mock_javamail.Mailbox
import org.scalatest.concurrent.Eventually
import org.scalatest.{Matchers, WordSpecLike}
import scala.concurrent.duration._
import scala.language.postfixOps
class ScrapeMailerSpec extends WordSpecLike with Matchers with Eventually {
"send an email" in {
val to = "[email protected]"
val subject = "Search results for scrapeyard.io"
val body = "you want it to be one way"
ScrapeMailer.sendMail(to, subject, body)
eventually(timeout(2 seconds)) {
val inbox = Mailbox.get(to)
inbox.size should be (1)
val msg = inbox.get(0)
msg.getSubject should be (subject)
msg.getContent should be (body)
}
}
}
| zoltanmaric/scrapeyard | server/src/test/scala/io/scrapeyard/ScrapeMailerSpec.scala | Scala | gpl-2.0 | 739 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2010, 2011 Mark Harrah
*/
package sbt.complete
import Parser._
import sbt.Types.{ left, right, some }
import sbt.Util.{ makeList, separate }
/**
* A String parser that provides semi-automatic tab completion.
* A successful parse results in a value of type `T`.
* The methods in this trait are what must be implemented to define a new Parser implementation, but are not typically useful for common usage.
* Instead, most useful methods for combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type.
*/
sealed trait Parser[+T] {
def derive(i: Char): Parser[T]
def resultEmpty: Result[T]
def result: Option[T]
def completions(level: Int): Completions
def failure: Option[Failure]
def isTokenStart = false
def ifValid[S](p: => Parser[S]): Parser[S]
def valid: Boolean
}
sealed trait RichParser[A] {
/** Apply the original Parser and then apply `next` (in order). The result of both is provides as a pair. */
def ~[B](next: Parser[B]): Parser[(A, B)]
/** Apply the original Parser one or more times and provide the non-empty sequence of results.*/
def + : Parser[Seq[A]]
/** Apply the original Parser zero or more times and provide the (potentially empty) sequence of results.*/
def * : Parser[Seq[A]]
/** Apply the original Parser zero or one times, returning None if it was applied zero times or the result wrapped in Some if it was applied once.*/
def ? : Parser[Option[A]]
/** Apply either the original Parser or `b`.*/
def |[B >: A](b: Parser[B]): Parser[B]
/** Apply either the original Parser or `b`.*/
def ||[B](b: Parser[B]): Parser[Either[A, B]]
/** Apply the original Parser to the input and then apply `f` to the result.*/
def map[B](f: A => B): Parser[B]
/**
* Returns the original parser. This is useful for converting literals to Parsers.
* For example, `'c'.id` or `"asdf".id`
*/
def id: Parser[A]
/** Apply the original Parser, but provide `value` as the result if it succeeds. */
def ^^^[B](value: B): Parser[B]
/** Apply the original Parser, but provide `alt` as the result if it fails.*/
def ??[B >: A](alt: B): Parser[B]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of `next`.
   * (The arrow points in the direction of the retained result.)
*/
def <~[B](b: Parser[B]): Parser[A]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of the original parser.
   * (The arrow points in the direction of the retained result.)
*/
def ~>[B](b: Parser[B]): Parser[B]
/** Uses the specified message if the original Parser fails.*/
def !!!(msg: String): Parser[A]
/**
* If an exception is thrown by the original Parser,
* capture it and fail locally instead of allowing the exception to propagate up and terminate parsing.
*/
def failOnException: Parser[A]
@deprecated("Use `not` and explicitly provide the failure message", "0.12.2")
def unary_- : Parser[Unit]
/**
* Apply the original parser, but only succeed if `o` also succeeds.
* Note that `o` does not need to consume the same amount of input to satisfy this condition.
*/
def &(o: Parser[_]): Parser[A]
@deprecated("Use `and` and `not` and explicitly provide the failure message", "0.12.2")
def -(o: Parser[_]): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
def examples(s: String*): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
def examples(s: Set[String], check: Boolean = false): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
   *                            prevent lengthy pauses and avoid a bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the
* given parser). Invalid examples will be filtered out and only valid suggestions will
* be displayed.
* @return a new parser with a new source of completions.
*/
def examples(exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @return a new parser with a new source of completions. It displays at most 25 completion examples and does not
* remove invalid examples.
*/
def examples(exampleSource: ExampleSource): Parser[A] = examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
/** Converts a Parser returning a Char sequence to a Parser returning a String.*/
def string(implicit ev: A <:< Seq[Char]): Parser[String]
/**
* Produces a Parser that filters the original parser.
* If 'f' is not true when applied to the output of the original parser, the Parser returned by this method fails.
* The failure message is constructed by applying `msg` to the String that was successfully parsed by the original parser.
*/
def filter(f: A => Boolean, msg: String => String): Parser[A]
/** Applies the original parser, applies `f` to the result to get the next parser, and applies that parser and uses its result for the overall result. */
def flatMap[B](f: A => Parser[B]): Parser[B]
}
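// A hedged usage sketch, not part of the original sbt sources: it illustrates how the
// RichParser combinators documented above compose. The value names (`digit`, `number`,
// `countArg`) are illustrative assumptions, not sbt API.
//
//   import sbt.complete.Parser
//   import sbt.complete.Parser._
//
//   val digit: Parser[Char] = charClass(_.isDigit, "digit")
//   val number: Parser[Int] = digit.+.string.map(_.toInt) // one or more digits, converted to Int
//   val countArg: Parser[Int] = "count=" ~> number        // match the literal prefix, keep only the number
//   // Parser.parse("count=42", countArg) == Right(42)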
/** Contains Parser implementation helper methods not typically needed for using parsers. */
object Parser extends ParserMain {
sealed abstract class Result[+T] {
def isFailure: Boolean
def isValid: Boolean
def errors: Seq[String]
def or[B >: T](b: => Result[B]): Result[B]
def either[B](b: => Result[B]): Result[Either[T, B]]
def map[B](f: T => B): Result[B]
def flatMap[B](f: T => Result[B]): Result[B]
def &&(b: => Result[_]): Result[T]
def filter(f: T => Boolean, msg: => String): Result[T]
def seq[B](b: => Result[B]): Result[(T, B)] = app(b)((m, n) => (m, n))
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C]
def toEither: Either[() => Seq[String], T]
}
final case class Value[+T](value: T) extends Result[T] {
def isFailure = false
def isValid: Boolean = true
def errors = Nil
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C] = b match {
case fail: Failure => fail
case Value(bv) => Value(f(value, bv))
}
def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this }
def or[B >: T](b: => Result[B]): Result[B] = this
def either[B](b: => Result[B]): Result[Either[T, B]] = Value(Left(value))
def map[B](f: T => B): Result[B] = Value(f(value))
def flatMap[B](f: T => Result[B]): Result[B] = f(value)
def filter(f: T => Boolean, msg: => String): Result[T] = if (f(value)) this else mkFailure(msg)
def toEither = Right(value)
}
final class Failure private[sbt] (mkErrors: => Seq[String], val definitive: Boolean) extends Result[Nothing] {
lazy val errors: Seq[String] = mkErrors
def isFailure = true
def isValid = false
def map[B](f: Nothing => B) = this
def flatMap[B](f: Nothing => Result[B]) = this
def or[B](b: => Result[B]): Result[B] = b match {
case v: Value[B] => v
case f: Failure => if (definitive) this else this ++ f
}
def either[B](b: => Result[B]): Result[Either[Nothing, B]] = b match {
case Value(v) => Value(Right(v))
case f: Failure => if (definitive) this else this ++ f
}
def filter(f: Nothing => Boolean, msg: => String) = this
def app[B, C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this
def &&(b: => Result[_]) = this
def toEither = Left(() => errors)
private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors)
}
def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure = new Failure(errors.distinct, definitive)
def mkFailure(error: => String, definitive: Boolean = false): Failure = new Failure(error :: Nil, definitive)
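  // Hedged sketch, not part of the original file, of how the Result values defined above combine.
  //
  //   Value(1) or mkFailure("boom")                  // Value(1): a success wins over a later failure
  //   (mkFailure("a") or mkFailure("b")).errors      // Seq("a", "b"): non-definitive failures accumulate
  //   mkFailure("a", definitive = true) or Value(2)  // Value(2): `or` still prefers a successful alternative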
@deprecated("This method is deprecated and will be removed in the next major version. Use the parser directly to check for invalid completions.", since = "0.13.2")
def checkMatches(a: Parser[_], completions: Seq[String]) {
val bad = completions.filter(apply(a)(_).resultEmpty.isFailure)
if (!bad.isEmpty) sys.error("Invalid example completions: " + bad.mkString("'", "', '", "'"))
}
def tuple[A, B](a: Option[A], b: Option[B]): Option[(A, B)] =
(a, b) match { case (Some(av), Some(bv)) => Some((av, bv)); case _ => None }
def mapParser[A, B](a: Parser[A], f: A => B): Parser[B] =
a.ifValid {
a.result match {
case Some(av) => success(f(av))
case None => new MapParser(a, f)
}
}
def bindParser[A, B](a: Parser[A], f: A => Parser[B]): Parser[B] =
a.ifValid {
a.result match {
case Some(av) => f(av)
case None => new BindParser(a, f)
}
}
def filterParser[T](a: Parser[T], f: T => Boolean, seen: String, msg: String => String): Parser[T] =
a.ifValid {
a.result match {
case Some(av) if f(av) => success(av)
case _ => new Filter(a, f, seen, msg)
}
}
def seqParser[A, B](a: Parser[A], b: Parser[B]): Parser[(A, B)] =
a.ifValid {
b.ifValid {
(a.result, b.result) match {
case (Some(av), Some(bv)) => success((av, bv))
case (Some(av), None) => b map { bv => (av, bv) }
case (None, Some(bv)) => a map { av => (av, bv) }
case (None, None) => new SeqParser(a, b)
}
}
}
def choiceParser[A, B](a: Parser[A], b: Parser[B]): Parser[Either[A, B]] =
if (a.valid)
if (b.valid) new HetParser(a, b) else a.map(left.fn)
else
b.map(right.fn)
def opt[T](a: Parser[T]): Parser[Option[T]] =
if (a.valid) new Optional(a) else success(None)
def onFailure[T](delegate: Parser[T], msg: String): Parser[T] =
if (delegate.valid) new OnFailure(delegate, msg) else failure(msg)
def trapAndFail[T](delegate: Parser[T]): Parser[T] =
delegate.ifValid(new TrapAndFail(delegate))
def zeroOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 0, Infinite)
def oneOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 1, Infinite)
def repeat[T](p: Parser[T], min: Int = 0, max: UpperBound = Infinite): Parser[Seq[T]] =
repeat(None, p, min, max, Nil)
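  // Hedged sketch, not part of the original file, of the repetition combinators above;
  // `letter` is an illustrative assumption.
  //
  //   val letter: Parser[Char] = charClass(_.isLetter, "letter")
  //   zeroOrMore(letter)  // matches "", "a", "ab", ...
  //   oneOrMore(letter)   // matches "a", "ab", ... but fails on empty input
  //   // Parser.parse("abc", oneOrMore(letter).string) == Right("abc")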
private[complete] def repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, revAcc: List[T]): Parser[Seq[T]] =
{
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
assume(max >= min, "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
repeated match {
case i: Invalid if min == 0 => invalidButOptional
case i: Invalid => i
case _ =>
repeated.result match {
case Some(value) => success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here
case None => if (max.isZero) success(revAcc.reverse) else new Repeat(partial, repeated, min, max, revAcc)
}
}
partial match {
case Some(part) =>
part.ifValid {
part.result match {
case Some(value) => repeat(None, repeated, min, max, value :: revAcc)
case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse))
}
}
case None => checkRepeated(success(Nil))
}
}
@deprecated("Explicitly call `and` and `not` to provide the failure message.", "0.12.2")
def sub[T](a: Parser[T], b: Parser[_]): Parser[T] = and(a, not(b))
def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
}
trait ParserMain {
/** Provides combinators for Parsers.*/
implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] {
def ~[B](b: Parser[B]) = seqParser(a, b)
def ||[B](b: Parser[B]) = choiceParser(a, b)
def |[B >: A](b: Parser[B]) = homParser(a, b)
def ? = opt(a)
def * = zeroOrMore(a)
def + = oneOrMore(a)
def map[B](f: A => B) = mapParser(a, f)
def id = a
def ^^^[B](value: B): Parser[B] = a map { _ => value }
def ??[B >: A](alt: B): Parser[B] = a.? map { _ getOrElse alt }
def <~[B](b: Parser[B]): Parser[A] = (a ~ b) map { case av ~ _ => av }
def ~>[B](b: Parser[B]): Parser[B] = (a ~ b) map { case _ ~ bv => bv }
def !!!(msg: String): Parser[A] = onFailure(a, msg)
def failOnException: Parser[A] = trapAndFail(a)
def unary_- = not(a)
def &(o: Parser[_]) = and(a, o)
def -(o: Parser[_]) = sub(a, o)
def examples(s: String*): Parser[A] = examples(s.toSet)
def examples(s: Set[String], check: Boolean = false): Parser[A] = examples(new FixedSetExamples(s), s.size, check)
def examples(s: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples)
def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg)
def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString)
def flatMap[B](f: A => Parser[B]) = bindParser(a, f)
}
implicit def literalRichCharParser(c: Char): RichParser[Char] = richParser(c)
implicit def literalRichStringParser(s: String): RichParser[String] = richParser(s)
/**
* Construct a parser that is valid, but has no valid result. This is used as a way
* to provide a definitive Failure when a parser doesn't match empty input. For example,
* in `softFailure(...) | p`, if `p` doesn't match the empty sequence, the failure will come
* from the Parser constructed by the `softFailure` method.
*/
private[sbt] def softFailure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
SoftInvalid(mkFailures(msg :: Nil, definitive))
/**
* Defines a parser that always fails on any input with messages `msgs`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
*/
def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] = Invalid(mkFailures(msgs, definitive))
/**
* Defines a parser that always fails on any input with message `msg`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
*/
def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] = invalid(msg :: Nil, definitive)
/** Defines a parser that always succeeds on empty input with the result `value`.*/
def success[T](value: T): Parser[T] = new ValidParser[T] {
override def result = Some(value)
def resultEmpty = Value(value)
def derive(c: Char) = Parser.failure("Expected end of input.")
def completions(level: Int) = Completions.empty
override def toString = "success(" + value + ")"
}
/** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/
implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] =
charClass(r contains _).examples(r.map(_.toString): _*)
/** Defines a Parser that parses a single character only if it is contained in `legal`.*/
def chars(legal: String): Parser[Char] =
{
val set = legal.toSet
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
}
/**
* Defines a Parser that parses a single character only if the predicate `f` returns true for that character.
* If this parser fails, `label` is used as the failure message.
*/
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] = new CharacterClass(f, label)
/** Presents a single Char `ch` as a Parser that only parses that exact character. */
implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected '" + ch + "'")
def derive(c: Char) = if (c == ch) success(ch) else new Invalid(resultEmpty)
def completions(level: Int) = Completions.single(Completion.suggestStrict(ch.toString))
override def toString = "'" + ch + "'"
}
/** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/
implicit def literal(s: String): Parser[String] = stringLiteral(s, 0)
/** See [[unapply]]. */
object ~ {
/** Convenience for destructuring a tuple that mirrors the `~` combinator.*/
def unapply[A, B](t: (A, B)): Some[(A, B)] = Some(t)
}
  /** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccessful, an error message is provided in `Left`.*/
def parse[T](str: String, parser: Parser[T]): Either[String, T] =
Parser.result(parser, str).left.map { failures =>
val (msgs, pos) = failures()
ProcessError(str, msgs, pos)
}
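  // Hedged sketch, not part of the original file, of `parse` on success and failure
  // (assumes `import sbt.complete.Parser._` for the String-to-Parser implicits).
  //
  //   parse("compile", "compile".id)        // Right("compile")
  //   parse("compil", "compile".id).isLeft  // true: the Left value carries a positioned error message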
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str`.
* If `completions` is true, the available completions for the input are displayed.
* Otherwise, the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
*
* See also [[sampleParse]] and [[sampleCompletions]].
*/
def sample(str: String, parser: Parser[_], completions: Boolean = false): Unit =
if (completions) sampleCompletions(str, parser) else sampleParse(str, parser)
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
*/
def sampleParse(str: String, parser: Parser[_]): Unit =
parse(str, parser) match {
case Left(msg) => println(msg)
case Right(v) => println(v)
}
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the available completions are displayed on separate lines.
* If parsing fails, the error message is displayed.
*/
def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit =
Parser.completions(parser, str, level).get foreach println
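  // Hedged sketch, not part of the original file: exploring a parser interactively during development.
  //
  //   sample("co", "compile".id | "console".id, completions = true)
  //   // prints the available completions for the partial input "co"
  //   sample("compile", "compile".id | "console".id)
  //   // prints the parsed result, here the string "compile"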
// intended to be temporary pending proper error feedback
def result[T](p: Parser[T], s: String): Either[() => (Seq[String], Int), T] =
{
def loop(i: Int, a: Parser[T]): Either[() => (Seq[String], Int), T] =
a match {
case Invalid(f) => Left(() => (f.errors, i))
case _ =>
val ci = i + 1
if (ci >= s.length)
a.resultEmpty.toEither.left.map { msgs0 =>
() =>
val msgs = msgs0()
val nonEmpty = if (msgs.isEmpty) "Unexpected end of input" :: Nil else msgs
(nonEmpty, ci)
}
else
loop(ci, a derive s(ci))
}
loop(-1, p)
}
/** Applies parser `p` to input `s`. */
def apply[T](p: Parser[T])(s: String): Parser[T] =
(p /: s)(derive1)
/** Applies parser `p` to a single character of input. */
def derive1[T](p: Parser[T], c: Char): Parser[T] =
if (p.valid) p.derive(c) else p
/**
* Applies parser `p` to input `s` and returns the completions at verbosity `level`.
* The interpretation of `level` is up to parser definitions, but 0 is the default by convention,
* with increasing positive numbers corresponding to increasing verbosity. Typically no more than
* a few levels are defined.
*/
def completions(p: Parser[_], s: String, level: Int): Completions =
// The x Completions.empty removes any trailing token completions where append.isEmpty
apply(p)(s).completions(level) x Completions.empty
def examples[A](a: Parser[A], completions: Set[String], check: Boolean = false): Parser[A] =
examples(a, new FixedSetExamples(completions), completions.size, check)
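  // Hedged sketch, not part of the original file: attaching a fixed example set so that tab
  // completion can suggest values; `branch` is an illustrative assumption.
  //
  //   val branch: Parser[String] =
  //     examples(charClass(c => !c.isWhitespace, "branch character").+.string, Set("master", "develop"))
  //   // completions(branch, "ma", level = 1) suggests completing "ma" to "master"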
/**
* @param a the parser to decorate with a source of examples. All validation and parsing is delegated to this parser,
* only [[Parser.completions]] is modified.
* @param completions the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
   *                            prevent lengthy pauses and avoid a bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the given parser). An
* exception is thrown if the example source contains no valid completion suggestions.
* @tparam A the type of values that are returned by the parser.
   * @return a parser that delegates parsing to `a` but draws its tab-completion examples from `completions`.
*/
def examples[A](a: Parser[A], completions: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] =
if (a.valid) {
a.result match {
case Some(av) => success(av)
case None =>
new ParserWithExamples(a, completions, maxNumberOfExamples, removeInvalidExamples)
}
} else a
def matched(t: Parser[_], seen: Vector[Char] = Vector.empty, partial: Boolean = false): Parser[String] =
t match {
case i: Invalid => if (partial && !seen.isEmpty) success(seen.mkString) else i
case _ =>
if (t.result.isEmpty)
new MatchedString(t, seen, partial)
else
success(seen.mkString)
}
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, the completions provided by the delegate `t` or a later derivative are appended to
* the prefix String already seen by this parser.
*/
def token[T](t: Parser[T]): Parser[T] = token(t, TokenCompletions.default)
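  // Hedged sketch, not part of the original file: `token` groups a sub-parser into a single
  // tab-completion unit; `configName` is an illustrative assumption.
  //
  //   val configName: Parser[String] = token("compile".id | "test".id | "runtime".id)
  //   // completions(configName, "", level = 1) offers "compile", "test" and "runtime" as
  //   // whole-token suggestions rather than character-by-character alternatives.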
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level.
* Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser.
*/
def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] = token(t, TokenCompletions.default.hideWhen(hide))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed.
*/
def token[T](t: Parser[T], description: String): Parser[T] = token(t, TokenCompletions.displayOnly(description))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `display` is used as the printed suggestion, but the completions from the delegate
* parser `t` are used to complete if unambiguous.
*/
def tokenDisplay[T](t: Parser[T], display: String): Parser[T] =
token(t, TokenCompletions.overrideDisplay(display))
def token[T](t: Parser[T], complete: TokenCompletions): Parser[T] =
mkToken(t, "", complete)
@deprecated("Use a different `token` overload.", "0.12.1")
def token[T](t: Parser[T], seen: String, track: Boolean, hide: Int => Boolean): Parser[T] =
{
val base = if (track) TokenCompletions.default else TokenCompletions.displayOnly(seen)
token(t, base.hideWhen(hide))
}
private[sbt] def mkToken[T](t: Parser[T], seen: String, complete: TokenCompletions): Parser[T] =
if (t.valid && !t.isTokenStart)
if (t.result.isEmpty) new TokenStart(t, seen, complete) else t
else
t
def homParser[A](a: Parser[A], b: Parser[A]): Parser[A] = (a, b) match {
case (Invalid(af), Invalid(bf)) => Invalid(af ++ bf)
case (Invalid(_), bv) => bv
case (av, Invalid(_)) => av
case (av, bv) => new HomParser(a, b)
}
@deprecated("Explicitly specify the failure message.", "0.12.2")
def not(p: Parser[_]): Parser[Unit] = not(p, "Excluded.")
def not(p: Parser[_], failMessage: String): Parser[Unit] = p.result match {
case None => new Not(p, failMessage)
case Some(_) => failure(failMessage)
}
def oneOf[T](p: Seq[Parser[T]]): Parser[T] = p.reduceLeft(_ | _)
def seq[T](p: Seq[Parser[T]]): Parser[Seq[T]] = seq0(p, Nil)
def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] =
{
val (newErrors, valid) = separate(p) { case Invalid(f) => Left(f.errors); case ok => Right(ok) }
def combinedErrors = errors ++ newErrors.flatten
if (valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors)
}
def stringLiteral(s: String, start: Int): Parser[String] =
{
val len = s.length
if (len == 0) sys.error("String literal cannot be empty") else if (start >= len) success(s) else new StringLiteral(s, start)
}
}
sealed trait ValidParser[T] extends Parser[T] {
final def valid = true
final def failure = None
final def ifValid[S](p: => Parser[S]): Parser[S] = p
}
private final case class Invalid(fail: Failure) extends Parser[Nothing] {
def failure = Some(fail)
def result = None
def resultEmpty = fail
def derive(c: Char) = sys.error("Invalid.")
def completions(level: Int) = Completions.nil
override def toString = fail.errors.mkString("; ")
def valid = false
def ifValid[S](p: => Parser[S]): Parser[S] = this
}
private final case class SoftInvalid(fail: Failure) extends ValidParser[Nothing] {
def result = None
def resultEmpty = fail
def derive(c: Char) = Invalid(fail)
def completions(level: Int) = Completions.nil
override def toString = fail.errors.mkString("; ")
}
private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] {
def result = try { a.result } catch { case e: Exception => None }
def resultEmpty = try { a.resultEmpty } catch { case e: Exception => fail(e) }
def derive(c: Char) = try { trapAndFail(a derive c) } catch { case e: Exception => Invalid(fail(e)) }
def completions(level: Int) = try { a.completions(level) } catch { case e: Exception => Completions.nil }
override def toString = "trap(" + a + ")"
override def isTokenStart = a.isTokenStart
private[this] def fail(e: Exception): Failure = mkFailure(e.toString)
}
private final class OnFailure[A](a: Parser[A], message: String) extends ValidParser[A] {
def result = a.result
def resultEmpty = a.resultEmpty match { case f: Failure => mkFailure(message); case v: Value[A] => v }
def derive(c: Char) = onFailure(a derive c, message)
def completions(level: Int) = a.completions(level)
override def toString = "(" + a + " !!! \\"" + message + "\\" )"
override def isTokenStart = a.isTokenStart
}
private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[(A, B)] {
lazy val result = tuple(a.result, b.result)
lazy val resultEmpty = a.resultEmpty seq b.resultEmpty
def derive(c: Char) =
{
val common = a.derive(c) ~ b
a.resultEmpty match {
case Value(av) => common | b.derive(c).map(br => (av, br))
case _: Failure => common
}
}
def completions(level: Int) = a.completions(level) x b.completions(level)
override def toString = "(" + a + " ~ " + b + ")"
}
private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser[A] {
lazy val result = tuple(a.result, b.result) map (_._1)
def derive(c: Char) = (a derive c) | (b derive c)
lazy val resultEmpty = a.resultEmpty or b.resultEmpty
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " | " + b + ")"
}
private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A, B]] {
lazy val result = tuple(a.result, b.result) map { case (a, b) => Left(a) }
def derive(c: Char) = (a derive c) || (b derive c)
lazy val resultEmpty = a.resultEmpty either b.resultEmpty
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " || " + b + ")"
}
private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String]) extends ValidParser[Seq[T]] {
assert(!a.isEmpty)
lazy val resultEmpty: Result[Seq[T]] =
{
val res = a.map(_.resultEmpty)
val (failures, values) = separate(res)(_.toEither)
// if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
if (values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
}
def result = {
val success = a.flatMap(_.result)
if (success.length == a.length) Some(success) else None
}
def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _)
def derive(c: Char) = seq0(a.map(_ derive c), errors)
override def toString = "seq(" + a + ")"
}
private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] {
lazy val result = a.result flatMap { av => f(av).result }
lazy val resultEmpty = a.resultEmpty flatMap { av => f(av).resultEmpty }
def completions(level: Int) =
a.completions(level) flatMap { c =>
apply(a)(c.append).resultEmpty match {
case _: Failure => Completions.strict(Set.empty + c)
case Value(av) => c x f(av).completions(level)
}
}
def derive(c: Char) =
{
val common = a derive c flatMap f
a.resultEmpty match {
case Value(av) => common | derive1(f(av), c)
case _: Failure => common
}
}
override def isTokenStart = a.isTokenStart
override def toString = "bind(" + a + ")"
}
private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser[B] {
lazy val result = a.result map f
lazy val resultEmpty = a.resultEmpty map f
def derive(c: Char) = (a derive c) map f
def completions(level: Int) = a.completions(level)
override def isTokenStart = a.isTokenStart
override def toString = "map(" + a + ")"
}
private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String) extends ValidParser[T] {
def filterResult(r: Result[T]) = r.filter(f, msg(seen))
lazy val result = p.result filter f
lazy val resultEmpty = filterResult(p.resultEmpty)
def derive(c: Char) = filterParser(p derive c, f, seen + c, msg)
def completions(level: Int) = p.completions(level) filterS { s => filterResult(apply(p)(s).resultEmpty).isValid }
override def toString = "filter(" + p + ")"
override def isTokenStart = p.isTokenStart
}
private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean) extends ValidParser[String] {
lazy val seen = seenV.mkString
def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial)
def completions(level: Int) = delegate.completions(level)
def result = if (delegate.result.isDefined) Some(seen) else None
def resultEmpty = delegate.resultEmpty match { case f: Failure if !partial => f; case _ => Value(seen) }
override def isTokenStart = delegate.isTokenStart
override def toString = "matched(" + partial + ", " + seen + ", " + delegate + ")"
}
private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions) extends ValidParser[T] {
def derive(c: Char) = mkToken(delegate derive c, seen + c, complete)
def completions(level: Int) = complete match {
case dc: TokenCompletions.Delegating => dc.completions(seen, level, delegate.completions(level))
case fc: TokenCompletions.Fixed => fc.completions(seen, level)
}
def result = delegate.result
def resultEmpty = delegate.resultEmpty
override def isTokenStart = true
override def toString = "token('" + complete + ", " + delegate + ")"
}
private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] {
lazy val result = tuple(a.result, b.result) map { _._1 }
def derive(c: Char) = (a derive c) & (b derive c)
def completions(level: Int) = a.completions(level).filterS(s => apply(b)(s).resultEmpty.isValid)
lazy val resultEmpty = a.resultEmpty && b.resultEmpty
override def toString = "(%s) && (%s)".format(a, b)
}
private final class Not(delegate: Parser[_], failMessage: String) extends ValidParser[Unit] {
def derive(c: Char) = if (delegate.valid) not(delegate derive c, failMessage) else this
def completions(level: Int) = Completions.empty
def result = None
lazy val resultEmpty = delegate.resultEmpty match {
case f: Failure => Value(())
case v: Value[_] => mkFailure(failMessage)
}
override def toString = " -(%s)".format(delegate)
}
/**
* This class wraps an existing parser (the delegate), and replaces the delegate's completions with examples from
* the given example source.
*
   * This class asks the example source for a limited number of examples (to prevent lengthy and expensive
* computations and large amounts of allocated data). It then passes these examples on to the UI.
*
* @param delegate the parser to decorate with completion examples (i.e., completion of user input).
   * @param exampleSource the source from which this class will take examples (potentially filtering them with the delegate
* parser), and pass them to the UI.
* @param maxNumberOfExamples the maximum number of completions to read from the example source and pass to the UI. This
* limit prevents lengthy example generation and allocation of large amounts of memory.
* @param removeInvalidExamples indicates whether to remove examples that are deemed invalid by the delegate parser.
* @tparam T the type of value produced by the parser.
*/
private final class ParserWithExamples[T](delegate: Parser[T], exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean) extends ValidParser[T] {
def derive(c: Char) =
examples(delegate derive c, exampleSource.withAddedPrefix(c.toString), maxNumberOfExamples, removeInvalidExamples)
def result = delegate.result
lazy val resultEmpty = delegate.resultEmpty
def completions(level: Int) = {
if (exampleSource().isEmpty)
if (resultEmpty.isValid) Completions.nil else Completions.empty
else {
val examplesBasedOnTheResult = filteredExamples.take(maxNumberOfExamples).toSet
Completions(examplesBasedOnTheResult.map(ex => Completion.suggestion(ex)))
}
}
override def toString = "examples(" + delegate + ", " + exampleSource().take(2).toList + ")"
private def filteredExamples: Iterable[String] = {
if (removeInvalidExamples)
exampleSource().filter(isExampleValid)
else
exampleSource()
}
private def isExampleValid(example: String): Boolean = {
apply(delegate)(example).resultEmpty.isValid
}
}
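  // A rough usage sketch of the example decoration described above (hedged: assumes sbt's public
  // `examples` combinator on RichParser and the `FixedSetExamples` source; the parser and the
  // example values here are illustrative only):
  //
  //   val color: Parser[String] =
  //     token(NotSpace).examples(new FixedSetExamples(Seq("red", "green", "blue")), 25, true)
  //
  // Completion would then offer at most 25 suggestions, dropping any example that the delegate
  // parser rejects when removeInvalidExamples is true.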
private final class StringLiteral(str: String, start: Int) extends ValidParser[String] {
assert(0 <= start && start < str.length)
def failMsg = "Expected '" + str + "'"
def resultEmpty = mkFailure(failMsg)
def result = None
def derive(c: Char) = if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty)
def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start)))
override def toString = '"' + str + '"'
}
private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected " + label)
def derive(c: Char) = if (f(c)) success(c) else Invalid(resultEmpty)
def completions(level: Int) = Completions.empty
override def toString = "class(" + label + ")"
}
private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] {
def result = delegate.result map some.fn
def resultEmpty = Value(None)
def derive(c: Char) = (delegate derive c).map(some.fn)
def completions(level: Int) = Completion.empty +: delegate.completions(level)
override def toString = delegate.toString + "?"
}
private final class Repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, accumulatedReverse: List[T]) extends ValidParser[Seq[T]] {
assume(0 <= min, "Minimum occurences must be non-negative")
assume(max >= min, "Minimum occurences must be less than the maximum occurences")
def derive(c: Char) =
partial match {
case Some(part) =>
val partD = repeat(Some(part derive c), repeated, min, max, accumulatedReverse)
part.resultEmpty match {
case Value(pv) => partD | repeatDerive(c, pv :: accumulatedReverse)
case _: Failure => partD
}
case None => repeatDerive(c, accumulatedReverse)
}
def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] = repeat(Some(repeated derive c), repeated, (min - 1) max 0, max.decrement, accRev)
def completions(level: Int) =
{
def pow(comp: Completions, exp: Completions, n: Int): Completions =
if (n == 1) comp else pow(comp x exp, exp, n - 1)
val repC = repeated.completions(level)
val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min)
partial match {
case Some(p) => p.completions(level) x fin
case None => fin
}
}
def result = None
lazy val resultEmpty: Result[Seq[T]] =
{
val partialAccumulatedOption =
partial match {
case None => Value(accumulatedReverse)
case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse)
}
(partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _)
}
private def repeatedParseEmpty: Result[List[T]] =
{
if (min == 0)
Value(Nil)
else
// forced determinism
for (value <- repeated.resultEmpty) yield makeList(min, value)
}
override def toString = "repeat(" + min + "," + max + "," + partial + "," + repeated + ")"
} | jaceklaskowski/sbt | util/complete/src/main/scala/sbt/complete/Parser.scala | Scala | bsd-3-clause | 38,246 |
import com.google.inject.AbstractModule
import db.dao._
import services._
import play.api.{Configuration, Environment}
class Module(environment: Environment, configuration: Configuration) extends AbstractModule {
override def configure(): Unit = {
// DAOs
bind(classOf[MapDao]).to(classOf[PostgresMapDao])
bind(classOf[UsersDao]).to(classOf[PostgresUsersDao])
bind(classOf[SegmentsDao]).to(classOf[PostgresSegmentsDao])
bind(classOf[MiniSegmentsDao]).to(classOf[PostgresMiniSegmentsDao])
bind(classOf[SegmentRatingsDao]).to(classOf[PostgresSegmentRatingsDao])
bind(classOf[BetaUserWhitelistDao]).to(classOf[PostgresBetaUserWhitelistDao])
// Services
bind(classOf[RoutingService]).to(classOf[RoutingServiceImpl])
bind(classOf[SegmentService]).to(classOf[SegmentServiceImpl])
}
}
| trifectalabs/roadquality | api/app/Module.scala | Scala | bsd-3-clause | 824 |
/*
* Copyright 2014 Aurel Paulovic ([email protected]) (aurelpaulovic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aurelpaulovic.crdt.util
import com.aurelpaulovic.crdt.replica.Replica
case class TotalTimeClock[R <: Replica] private (val time: Long, val replica: R with Ordered[R], val ticks: Int = 0) extends TotalOrdering[TotalTimeClock[R]] {
def compare(other: TotalTimeClock[R]): Int = other match {
case TotalTimeClock(otherTime, _, _) if otherTime != time => time.compare(otherTime)
case TotalTimeClock(_, otherReplica, _) if otherReplica != replica => replica.compare(otherReplica)
case TotalTimeClock(_, _, otherTicks) => ticks.compare(otherTicks)
}
def copyForReplica[R2 <: Replica](newReplica: R2 with Ordered[R2]): TotalTimeClock[R2] = TotalTimeClock(time, newReplica, ticks)
def makeGreaterThan(other: TotalTimeClock[R]): TotalTimeClock[R] = TotalTimeClock.makeGreaterThan(replica, other)
}
object TotalTimeClock {
private var highestSeenTime: Long = System.currentTimeMillis()
private var currentTick: Int = 0
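  // Each call to apply returns a clock strictly greater than any clock previously produced here:
  // the wall-clock time is kept monotone via highestSeenTime, and currentTick breaks ties for
  // calls that fall within the same millisecond.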
def apply[R <: Replica](replica: R with Ordered[R]): TotalTimeClock[R] = this.synchronized {
val currentMillis = System.currentTimeMillis()
if (currentMillis <= highestSeenTime) currentTick += 1
else {
highestSeenTime = currentMillis
currentTick = 0
}
new TotalTimeClock(highestSeenTime, replica, currentTick)
}
def makeGreaterThan[R <: Replica](replica: R with Ordered[R], other: TotalTimeClock[R]): TotalTimeClock[R] = this.synchronized {
if (other.time > highestSeenTime) highestSeenTime = other.time
TotalTimeClock(replica)
}
} | AurelPaulovic/crdt | src/main/scala/com/aurelpaulovic/crdt/util/TotalTimeClock.scala | Scala | apache-2.0 | 2,196 |
package com.gu.notifications.worker.delivery.fcm.oktransport
import java.io.ByteArrayInputStream
import java.util.concurrent.ConcurrentLinkedQueue
import com.google.api.client.http.{LowLevelHttpRequest, LowLevelHttpResponse}
import okhttp3.{Headers, MediaType, OkHttpClient, Request, RequestBody, Response, ResponseBody}
import okio.BufferedSink
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
class OkGoogleHttpRequest(okHttpClient: OkHttpClient, url: String, method: String) extends LowLevelHttpRequest {
//No idea whether FCM now or in the future will call addHeaders concurrently.
private val headers = new ConcurrentLinkedQueue[(String, String)]()
override def addHeader(name: String, value: String): Unit = headers.offer((name, value))
override def execute(): LowLevelHttpResponse = {
val response: Response = executeRequest()
val (maybeContentType: Option[String], maybeBodyBytes: Option[Array[Byte]]) = callOnceWithResponseBody(response)(responseBody =>
(Option(responseBody.contentType()).map(_.toString), Option(responseBody.bytes()))
).getOrElse(None, None)
val maybeContent: Option[ByteArrayInputStream] = maybeBodyBytes.map(new ByteArrayInputStream(_))
val maybeContentLength: Option[Long] = maybeBodyBytes.map(_.length.toLong)
val headerList: scala.List[(String, String)] = readHeaders(response)
val maybeContentEncoding = Option(response.header("content-encoding"))
val httpProtocolInUpperCase = response.protocol().toString.toUpperCase
val statusCode = response.code
val statusReason = Option(response.message()).map(_.trim).filterNot(_.isEmpty)
new OkGoogleHttpResponse(
maybeContent,
maybeContentEncoding,
maybeContentLength,
maybeContentType,
httpProtocolInUpperCase,
statusCode,
statusReason,
headerList
)
}
private def readHeaders(response: Response): List[(String, String)] = {
val headerTuples = ArrayBuffer[(String, String)]()
val okHeaders: Headers = response.headers()
val headerCount = okHeaders.size()
for (headerIndex <- 0 until headerCount) {
headerTuples += ((okHeaders.name(headerIndex), okHeaders.value(headerIndex)))
}
headerTuples.toList
}
private def callOnceWithResponseBody[A](response: Response)(f: ResponseBody => A): Option[A] = {
val body = Option(response.body())
try {
body.map(f)
}
finally {
body.foreach(_.close())
}
}
private def executeRequest(): Response = okHttpClient.newCall(addHeadersToRequest(new Request.Builder())
.url(url)
.method(method, Option(getStreamingContent).map(content => {
new RequestBody {
override def contentType(): MediaType = MediaType.parse(getContentType)
override def writeTo(sink: BufferedSink): Unit = {
content.writeTo(sink.outputStream())
}
}
}).orNull)
.build())
.execute()
private def addHeadersToRequest(requestBuilder: Request.Builder): Request.Builder = {
Option(this.getContentLength).filter(_ >= 0).foreach(contentLength => addHeader("Content-Length", contentLength.toString))
Option(this.getContentEncoding).foreach(contentEncoding => addHeader("Content-Encoding", contentEncoding))
Option(this.getContentType).foreach(contentType => addHeader("Content-Type", contentType))
@tailrec
def pollThroughHeaders(): Unit =
Option(headers.poll()) match {
case Some((name, value)) => {
requestBuilder.addHeader(name, value)
pollThroughHeaders()
}
case _ => ()
}
pollThroughHeaders()
requestBuilder
}
}
| guardian/mobile-n10n | notificationworkerlambda/src/main/scala/com/gu/notifications/worker/delivery/fcm/oktransport/OkGoogleHttpRequest.scala | Scala | apache-2.0 | 3,666 |
import scala.collection.mutable.ArrayBuffer
// ****************************************************************************
class FastqIterator( file_name: String ) extends InputFile( file_name ) {
var fastq: FastqSequence = null
// **************************************************************************
def fastqs(): ArrayBuffer[FastqSequence] = {
val seqs: ArrayBuffer[FastqSequence] = ArrayBuffer()
// Read in all of the sequences
while ( end_of_file == false ) {
next
seqs += fastq
} // while
seqs
} // fastqs
// **************************************************************************
def micro_rnas( species: String ): ArrayBuffer[FastqSequence] = {
val seqs: ArrayBuffer[FastqSequence] = ArrayBuffer()
// Read in all of the sequences
while ( end_of_file == false ) {
next
if ( fastq.description contains species ) {
println( "Human: " + fastq.name + " " + fastq.description )
fastq.sequence = fastq.sequence.replace( 'U', 'T' )
seqs += fastq
} // if
} // while
seqs
} // fastqs
// **************************************************************************
def next(): FastqSequence = {
fastq = new FastqSequence()
fastq.parseHeader( line, '@' )
readEntry()
} // method next
// **************************************************************************
def readEntry(): FastqSequence = {
var seq: String = ""
var qual: String = ""
next_line()
while ( ( end_of_file == false ) && ( line.charAt( 0 ) != '+' ) )
{
if ( line.charAt( 0 ) != '+' )
{
seq += line
next_line()
} // if
} // while
fastq.sequence = seq
// Read in the quality letters.
next_line()
while ( ( end_of_file == false ) && ( qual.length < seq.length ) )
{
qual += line
next_line()
} // while
fastq.quality = qual
if ( qual.length != seq.length )
println( "*Warning* sequence length != quality length:\\n" + seq + "\\n" + qual + "\\n" )
//println("seq:"+fastq.sequence)
//println("qual:"+fastq.quality)
fastq
} // method readEntry
// **************************************************************************
} // class FastqIterator
| annashcherbina/sherlockstoolkit | scala_snp_caller/src/FastqIterator.scala | Scala | gpl-3.0 | 2,295 |
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations.examples
import io.deepsense.sparkutils.Linalg.Vectors
import io.deepsense.deeplang.doperables.GetFromVectorTransformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.GetFromVector
class GetFromVectorExample extends AbstractOperationExample[GetFromVector] {
override def dOperation: GetFromVector = {
val op = new GetFromVector()
op.transformer.setIndex(1)
op.transformer.setSingleColumn("features", "second_feature")
op.set(op.transformer.extractParamMap())
}
override def inputDataFrames: Seq[DataFrame] = {
val data = Seq(
Vectors.sparse(3, Seq((0, -2.0), (1, 2.3))),
Vectors.dense(0.01, 0.2, 3.0),
null,
Vectors.sparse(3, Seq((1, 0.91), (2, 3.2))),
Vectors.sparse(3, Seq((0, 5.7), (2, 2.7))),
Vectors.sparse(3, Seq()).toDense).map(Tuple1(_))
Seq(DataFrame.fromSparkDataFrame(sparkSQLSession.createDataFrame(data).toDF("features")))
}
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/it/scala/io/deepsense/deeplang/doperations/examples/GetFromVectorExample.scala | Scala | apache-2.0 | 1,660 |
package scorex.integration
import java.nio.ByteBuffer
import scorex.Controller
import Controller._
import org.scalatest.FunSuite
import scorex.block.Block
import scorex.consensus.{ConsensusModuleNxt, ConsensusModuleQora}
import scorex.network.message.{Message, BlockMessage}
import scorex.settings.{Constants, Settings}
class ValidChainGenerationSpecification extends FunSuite {
test("retroactive chain test") {
Settings.filename = "settings-test.json"
Controller.init()
wallet.generateNewAccounts(10)
require(wallet.privateKeyAccounts().nonEmpty)
Thread.sleep(15000)
val bh = Controller.blockchainStorage.height()
//chain validity check
(2 to bh).foreach { h =>
assert(Controller.blockchainStorage.blockAt(h).get.isValid())
assert(Controller.blockchainStorage.blockAt(h).get.signatureValid)
}
val b1 = Controller.blockchainStorage.blockAt(1).get
val b2 = Controller.blockchainStorage.blockAt(2).get
//empty block size check
if (Constants.ConsensusAlgo == ConsensusModuleQora) {
assert(b2.bytes.size == 309)
} else if (Constants.ConsensusAlgo == ConsensusModuleNxt) {
assert(b2.bytes.size == 213)
}
//toBytes/parse roundtrip test
val bb2 = Block.parse(b2.bytes).get
assert(bb2.timestamp == b2.timestamp)
assert(bb2.generator == b2.generator)
assert(b1.timestamp != b2.timestamp)
assert(b1 != b2)
//serialization/deserialization thru BlockMessage roundtrip test
val bytes = BlockMessage(2, b2).bytes
if (Constants.ConsensusAlgo == ConsensusModuleQora) {
assert(bytes.length == 326)
} else if (Constants.ConsensusAlgo == ConsensusModuleNxt) {
assert(bytes.length == 230)
}
val restored = Message.parse(ByteBuffer.wrap(bytes)).get.asInstanceOf[BlockMessage].block
assert(restored.timestamp == b2.timestamp)
assert(restored.isValid())
Controller.stopAll()
}
} | Pole-he/Scorex-Lagonaki | src/test/scala/scorex/integration/ValidChainGenerationSpecification.scala | Scala | cc0-1.0 | 1,927 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api
import slamdata.Predef._
import quasar.fp.ski._
import quasar.contrib.pathy.sandboxCurrent
import java.util.{zip => jzip}
import pathy.Path._
import scalaz._, Scalaz._
import scalaz.concurrent.Task
import scalaz.stream._
import scodec.bits.{ByteVector}
import scodec.interop.scalaz._
object Zip {
// First construct a single Process of Ops which can be performed in
// sequence to produce the entire archive.
private sealed abstract class Op extends Product with Serializable
private object Op {
final case object Start extends Op
final case class StartEntry(entry: jzip.ZipEntry) extends Op
final case class Chunk(bytes: ByteVector) extends Op
final case object EndEntry extends Op
final case object End extends Op
}
// Wrap up ZipOutputStream's statefulness in a class offering just two
// mutating operations: one to accept an Op to be processed, and another
// to poll for data that's been written.
private class Buffer[F[_]: Monad] {
// Assumes that the var is private to Buffer and exposed methods are private to
// method zipFiles. Further assumes that usage by Process is without contention.
@SuppressWarnings(Array("org.wartremover.warts.Var"))
private[this] var chunks = ByteVector.empty
private def append(bytes: ByteVector) = chunks = chunks ++ bytes
private val sink = {
val os = new java.io.OutputStream {
@SuppressWarnings(Array("org.wartremover.warts.Overloading"))
def write(b: Int) = append(ByteVector(b.toByte))
// NB: overriding here to process each buffer-worth coming from the ZipOS in one call
@SuppressWarnings(Array("org.wartremover.warts.Overloading"))
override def write(b: Array[Byte], off: Int, len: Int) = append(ByteVector(b, off, len))
}
new jzip.ZipOutputStream(os)
}
def accept(op: Op): F[Unit] = (op match {
case Op.Start => ()
case Op.StartEntry(entry) => sink.putNextEntry(entry)
case Op.Chunk(bytes) => sink.write(bytes.toArray)
case Op.EndEntry => sink.closeEntry
case Op.End => sink.close
}).point[F]
def poll: F[ByteVector] = {
val result = chunks
chunks = ByteVector.empty
result
}.point[F]
}
def zipFiles[F[_]: Monad](files: Map[RelFile[Sandboxed], Process[F, ByteVector]]): Process[F, ByteVector] = {
val ops: Process[F, Op] = {
def fileOps(file: RelFile[Sandboxed], bytes: Process[F, ByteVector]) = {
Process.emit(Op.StartEntry(new jzip.ZipEntry(posixCodec.printPath(file).drop(2)))) ++ // do not include the "./" of a path when zipping
bytes.map(Op.Chunk(_)) ++
Process.emit(Op.EndEntry)
}
Process.emit(Op.Start) ++
Process.emitAll(files.toList).flatMap((fileOps _).tupled) ++
Process.emit(Op.End)
}
    // Fold the allocation of Buffer instances into the processing
// of Ops, so that a new instance is created as needed each time
// the resulting process is run, then flatMap so that each chunk
// can be handled in Task.
ops.zipWithState[Option[Buffer[F]]](None) {
case (_, None) => Some(new Buffer)
case (Op.End, Some(_)) => None
case (_, buf@Some(_)) => buf
}.flatMap {
case (Op.Start, _) => Process.emit(ByteVector.empty)
case (op, Some(buf)) =>
Process.await(for {
_ <- buf.accept(op)
b <- buf.poll
} yield b) { bytes =>
if (bytes.size ≟ 0) Process.halt
else Process.emit(bytes)
}
case (_, None) => Process.fail(new RuntimeException("unexpected state"))
}
}
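  // Hedged usage sketch: given a map from (sandboxed, relative) file path to a Process of byte
  // chunks, zipFiles streams back the bytes of a zip archive built from those entries,
  // e.g. (illustrative names only):
  //
  //   val archive: Process[F, ByteVector] =
  //     zipFiles(Map(someRelFile -> Process.emit(ByteVector.view("hello".getBytes))))
  //
  // `unzipFiles` below is the inverse, recovering a Map from path to full contents.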
def unzipFiles(zippedBytes: Process[Task, ByteVector]): EitherT[Task, String, Map[RelFile[Sandboxed], ByteVector]] = {
def entry(zis: jzip.ZipInputStream): OptionT[Task, (String, ByteVector)] =
for {
entry <- OptionT(Task.delay(Option(zis.getNextEntry())))
name = entry.getName
bytes <- contents(zis).liftM[OptionT]
_ <- Task.delay(zis.closeEntry()).liftM[OptionT]
} yield (name, bytes)
def contents(zis: jzip.ZipInputStream): Task[ByteVector] = {
def read(bufSize: Int)(is: jzip.ZipInputStream): OptionT[Task, ByteVector] =
OptionT(Task.delay {
val buf = new Array[Byte](bufSize)
val n = is.read(buf, 0, bufSize)
(n >= 0) option ByteVector.view(buf).take(n.toLong)
})
Process.unfoldEval(zis)(z => read(4*1024)(z).strengthR(z).run).runFoldMap(ι)
}
def entries(zis: jzip.ZipInputStream): Process[Task, (String, ByteVector)] =
Process.unfoldEval(zis)(z => entry(z).strengthR(z).run)
def toPath(pathString: String): Task[RelFile[Sandboxed]] =
posixCodec.parseRelFile(pathString).flatMap(sandboxCurrent).cata(
p => Task.now(p),
Task.fail(new RuntimeException(s"relative file path expected; found: $pathString")))
val is = io.toInputStream(zippedBytes)
EitherT((for {
zis <- Task.delay(new jzip.ZipInputStream(is))
es <- entries(zis).runLog
rez <- es.traverse { case (n: String, bs) => toPath(n).strengthR(bs) }
} yield rez.toMap).attempt.map(_.leftMap {
case x: jzip.ZipException => s"zip decoding error: $x"
case x => s"$x"
}))
}
}
| jedesah/Quasar | web/src/main/scala/quasar/api/zip.scala | Scala | apache-2.0 | 6,022 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.filter
import com.typesafe.scalalogging.LazyLogging
import org.geotools.filter.text.ecql.ECQL
import org.geotools.geometry.jts.ReferencedEnvelope
import org.geotools.referencing.crs.DefaultGeographicCRS
import org.junit.runner.RunWith
import org.locationtech.geomesa.filter.visitor.BoundsFilterVisitor
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BoundsFilterVisitorTest extends Specification with LazyLogging {
"BoundsFilterVisitor" should {
"work for during" >> {
val filter = ECQL.toFilter("(BBOX(geom,0,0,1,1)) AND (dtg DURING 2016-07-01T20:00:00.000Z/2016-07-01T21:00:00.000Z)")
BoundsFilterVisitor.visit(filter) mustEqual new ReferencedEnvelope(0.0, 1.0, 0.0, 1.0, DefaultGeographicCRS.WGS84)
}
}
}
| aheyne/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/filter/BoundsFilterVisitorTest.scala | Scala | apache-2.0 | 1,312 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Contributors:
* Hao Jiang - initial API and implementation
*/
package edu.uchicago.cs.encsel.dataset.feature
import edu.uchicago.cs.encsel.dataset.column.Column
import scala.io.Source
/**
* This captures features similar to what is captured in <code>Sortness</code>.
 * But instead of considering all pairs in a window, it only looks at adjacent pairs.
*/
object AdjInvertPair extends FeatureExtractor {
def featureType: String = "AdjInvertPair"
def supportFilter: Boolean = true
def extract(input: Column, prefix: String): Iterable[Feature] = {
val source = Source.fromFile(input.colFile)
val comparator = input.dataType.comparator()
var numPair = 0
var numInverted = 0
val fType = featureType(prefix)
try {
var prev: String = null
source.getLines().foreach(line => {
if (prev != null) {
numPair += 1
numInverted += (comparator.compare(prev, line) match {
case gt if gt > 0 => 1
case _ => 0
})
}
prev = line
})
if (numPair != 0) {
val ratio = (numPair - numInverted).toDouble / numPair
val ivpair = 1 - Math.abs(1 - 2 * ratio)
val kendallsTau = 1 - 2 * numInverted.toDouble / numPair
Iterable(
new Feature(fType, "totalpair", numPair),
new Feature(fType, "ivpair", ivpair),
new Feature(fType, "kendallstau", kendallsTau)
)
} else {
Iterable(
new Feature(fType, "totalpair", numPair),
new Feature(fType, "ivpair", 0),
new Feature(fType, "kendallstau", 0)
)
}
} finally {
source.close()
}
}
}
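// Worked example (illustrative): for column values 1, 3, 2, 4 there are 3 adjacent pairs and
// 1 inversion (3 > 2), so ratio = 2/3, ivpair = 1 - |1 - 2 * 2/3| = 2/3 and
// kendallstau = 1 - 2 * 1/3 = 1/3; a fully sorted column gives ivpair = 0 and kendallstau = 1.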
| harperjiang/enc-selector | src/main/scala/edu/uchicago/cs/encsel/dataset/feature/AdjInvertPair.scala | Scala | apache-2.0 | 2,490 |
package com.thoughtworks.deeplearning
package plugins
import com.thoughtworks.deeplearning.scalatest.ThoughtworksFutureToScalaFuture
import com.thoughtworks.each.Monadic._
import com.thoughtworks.feature.{Factory, ImplicitApply}
import com.thoughtworks.raii.asynchronous._
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.api.ops.impl.transforms.IsMax
import org.nd4j.linalg.convolution.Convolution
import org.nd4j.linalg.factory.Nd4j
import org.nd4j.linalg.ops.transforms.Transforms
import org.nd4j.linalg.util.ArrayUtil
import org.scalatest._
import org.nd4s.Implicits._
import com.thoughtworks.continuation._
import com.thoughtworks.future._
import scalaz.std.iterable._
object CumulativeINDArrayLayersSpec {
trait CNNs extends INDArrayLayers with ImplicitsSingleton with Training with Operators {
trait ImplicitsApi
extends super[INDArrayLayers].ImplicitsApi
with super[Training].ImplicitsApi
with super[Operators].ImplicitsApi
type Implicits <: ImplicitsApi
private def toArray(tuple2: (Int, Int)): Array[Int] = {
val (one, two) = tuple2
Array(one, two)
}
def im2col[Operand0, Out <: INDArrayLayer](operand0: Operand0,
kernel: (Int, Int),
stride: (Int, Int),
padding: (Int, Int))(
implicit deepLearning: DeepLearning.Aux[Operand0, INDArray, INDArray],
layerImplicits: ImplicitApply.Aux[indArrayPartialApplyRawForward.Rest, Out]): Out = {
INDArrayLayer.unary(operand0) { data0: INDArray =>
val shape0 = data0.shape
val strideArray = toArray(stride)
val paddingArray = toArray(padding)
val outputData = Convolution.im2col(data0, toArray(kernel), strideArray, paddingArray)
val delta0 = { outputDelta: INDArray =>
Convolution.col2im(outputDelta, strideArray, paddingArray, shape0(2), shape0(3))
}
(outputData, delta0)
}
}
@inline
def conv2d[Input, Weight, Bias, Out <: INDArrayLayer](input: Input,
weight: Weight,
bias: Bias,
kernel: (Int, Int),
stride: (Int, Int),
padding: (Int, Int))(
implicit inputDeepLearning: DeepLearning.Aux[Input, INDArray, INDArray],
weightDeepLearning: DeepLearning.Aux[Weight, INDArray, INDArray],
biasDeepLearning: DeepLearning.Aux[Bias, INDArray, INDArray],
layerImplicits: ImplicitApply.Aux[indArrayPartialApplyRawForward.Rest, Out]): Out = {
import implicits._
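      // conv2d expressed with im2col: unfold kernel-sized patches into rows, multiply by the
      // flattened kernel weights, add the bias, then reshape/permute back to NCHW layout.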
INDArrayLayer(monadic[Do] {
val inputShape = input.forward.each.data.shape
val numberOfImages = inputShape(0)
val depth = inputShape(1)
val height = inputShape(2)
val width = inputShape(3)
val numberOfKernels = weight.forward.each.data.shape.head
val col = im2col(input, kernel, stride, padding)
val permutedCol = col.permute(0, 4, 5, 1, 2, 3)
val depthKernelKernel = depth * kernel._1 * kernel._2
val operandCol2d = permutedCol.reshape(numberOfImages * height * width, depthKernelKernel)
val reshapedWeight = weight.reshape(numberOfKernels, depthKernelKernel)
val permutedWeight = reshapedWeight.permute(1, 0)
val dotResult = operandCol2d dot permutedWeight
val plusResult = dotResult + bias
val reshapeResult = plusResult.reshape(numberOfImages, height, width, numberOfKernels)
reshapeResult.permute(0, 3, 1, 2).forward.each
})
}
@inline
def maxPool[Operand0, Out <: INDArrayLayer](operand0: Operand0, poolSize: (Int, Int))(
implicit deepLearning: DeepLearning.Aux[Operand0, INDArray, INDArray],
layerImplicits: ImplicitApply.Aux[indArrayPartialApplyRawForward.Rest, Out]): Out = {
INDArrayLayer.unary(operand0) { data0: INDArray =>
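        // Max pooling via im2col: gather each poolSize window into one row and take the row max
        // forward; backward, the IsMax mask routes the incoming gradient only to the argmax cells.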
val shape0 = data0.shape
val kernelAndStrideSize: Array[Int] = toArray(poolSize)
val preMaxPool: INDArray =
Convolution
.im2col(data0, kernelAndStrideSize, kernelAndStrideSize, Array(0, 0))
.permute(0, 1, 4, 5, 2, 3)
val preShape: Seq[Int] = preMaxPool.shape().toSeq
val lastDimensionSize: Int = preShape.takeRight(2).product
val reshapedPreMaxPool: INDArray = preMaxPool
.reshape(preShape.take(preShape.length - 2) :+ lastDimensionSize: _*)
val outputData = reshapedPreMaxPool.max(4)
val delta0 = { outputDelta: INDArray =>
val a = reshapedPreMaxPool
val upStreamDup = a.dup()
val rows = ArrayUtil.prod(a.length())
val isMax: INDArray = Nd4j.getExecutioner
.execAndReturn(new IsMax(upStreamDup, 4))
.reshape(preShape.take(preShape.length - 2) :+ poolSize._2 :+ poolSize._1: _*)
.permute(0, 1, 2, 4, 3, 5)
.reshape('c', rows, 1)
val outputDelta1d = {
outputDelta
.repeat(-1, poolSize._1)
.permute(1, 0, 3, 2)
.repeat(-1, poolSize._2)
.permute(1, 0, 3, 2)
.reshape('c', shape0.product, 1)
}
isMax
.muliColumnVector(outputDelta1d)
.reshape(shape0: _*)
}
(outputData, delta0)
}
}
}
trait FixedLearningRate extends LearningRate {
val fixedLearningRate: scala.Double
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
final def learningRate: scala.Double = fixedLearningRate
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
trait LearningRate extends INDArrayWeights {
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
def learningRate: scala.Double
abstract override def delta: INDArray = super.delta mul learningRate
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
trait L1Regularization extends INDArrayWeights {
def l1Regularization: INDArray
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
abstract override def delta: INDArray = super.delta + Transforms.sign(weight.data) * l1Regularization
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
trait L2Regularization extends INDArrayWeights {
def l2Regularization: INDArray
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
abstract override def delta: INDArray = super.delta + weight.data * l2Regularization
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
trait Momentum extends INDArrayWeights {
trait INDArrayWeightApi extends super.INDArrayWeightApi { this: INDArrayWeight =>
def mu: scala.Double = 0.9
var v: INDArray = Nd4j.zeros(data.shape: _*)
}
override type INDArrayWeight <: INDArrayWeightApi with Weight
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
private lazy val delta0: INDArray = {
import weight._
v = super.delta + v * mu
v
}
abstract override def delta: INDArray = delta0
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
trait NesterovMomentum extends Momentum {
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
abstract override lazy val delta: INDArray = {
import weight._
val vPrev = v
vPrev * (-mu) + super.delta * (1 + mu)
}
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
/**
* @note This [[Adagrad]] hyperparameter is usually used before global [[LearningRate]]. e.g. `Adagrad with FixedLearningRate`, not `FixedLearningRate with Adagrad`
*/
trait Adagrad extends INDArrayWeights {
trait INDArrayWeightApi extends super.INDArrayWeightApi { this: INDArrayWeight =>
var cache: INDArray = Nd4j.zeros(data.shape: _*)
}
override type INDArrayWeight <: INDArrayWeightApi with Weight
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
def eps: scala.Double = 1e-4
abstract override lazy val delta: INDArray = {
import weight._
cache = cache + super.delta * super.delta
super.delta / (Transforms.sqrt(cache) + eps)
}
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
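  // Stacking sketch for the note above (illustrative): with `... with Adagrad with FixedLearningRate`,
  // LearningRate linearizes after Adagrad, so the learning-rate multiplication wraps the
  // Adagrad-normalized delta rather than the other way around.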
/**
* @note This [[RMSprop]] hyperparameter is usually used before global [[LearningRate]]. e.g. `RMSprop with FixedLearningRate`, not `FixedLearningRate with RMSprop`
*/
trait RMSprop extends INDArrayWeights {
trait INDArrayWeightApi extends super.INDArrayWeightApi { this: INDArrayWeight =>
var cache: INDArray = Nd4j.zeros(data.shape: _*)
}
override type INDArrayWeight <: INDArrayWeightApi with Weight
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
def eps: scala.Double = 1e-4
def decayRate: scala.Double = 0.99
abstract override lazy val delta: INDArray = {
import weight._
cache = cache * decayRate + super.delta * super.delta * (1.0 - decayRate)
super.delta / (Transforms.sqrt(cache) + eps)
}
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
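  // Adam as implemented below: m = beta1 * m + (1 - beta1) * g, v = beta2 * v + (1 - beta2) * g * g,
  // delta = m / (sqrt(v) + eps). Note the implementation omits the usual bias-correction terms.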
trait Adam extends INDArrayWeights {
trait INDArrayWeightApi extends super.INDArrayWeightApi { this: INDArrayWeight =>
var m: INDArray = Nd4j.zeros(data.shape: _*)
var v: INDArray = Nd4j.zeros(data.shape: _*)
}
override type INDArrayWeight <: INDArrayWeightApi with Weight
trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>
def beta1: scala.Double = 0.9
def beta2: scala.Double = 0.999
def eps: scala.Double = 1e-8
abstract override lazy val delta: INDArray = {
import weight._
m = m * beta1 + super.delta * (1.0 - beta1)
v = v * beta2 + (super.delta * super.delta) * (1.0 - beta2)
m / (Transforms.sqrt(v) + eps)
}
}
override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
}
/**
* @author 杨博 (Yang Bo)
*/
class CumulativeINDArrayLayersSpec
extends AsyncFreeSpec
with Matchers
with Inside
with ThoughtworksFutureToScalaFuture {
import CumulativeINDArrayLayersSpec._
val hyperparameters = Factory[
Logging with ImplicitsSingleton with DoubleTraining with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0)
import hyperparameters.implicits._
def trainAndAssertLossAndWeight(myNetwork: INDArray => hyperparameters.INDArrayLayer,
weight: hyperparameters.INDArrayWeight,
trainTimes: Int = 2,
expectedLoss: Int = 0,
expectedWeightSum: Int = -16,
input: INDArray = Nd4j.ones(4, 4)): Future[Assertion] = {
@throwableMonadic[Future]
val run: Future[Assertion] = {
for (_ <- 1 to trainTimes) {
myNetwork(input).train.each.sumT
}
val loss = myNetwork(input).predict.each
loss.sumT should be(expectedLoss)
weight.data.sumT should be(expectedWeightSum)
}
run
}
"INDArray + INDArray" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight + input
}
trainAndAssertLossAndWeight(myNetwork, weight)
}
"INDArray + Double" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight + 1.0
}
trainAndAssertLossAndWeight(myNetwork, weight)
}
"Double + INDArray" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
1.0 + weight
}
trainAndAssertLossAndWeight(myNetwork, weight)
}
"INDArray - INDArray" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight - (-input)
}
trainAndAssertLossAndWeight(myNetwork, weight)
}
"INDArray - Double" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight - (-1.0)
}
trainAndAssertLossAndWeight(myNetwork, weight)
}
"Double - INDArray" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4).negi()))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
1.0 - weight
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 16)
}
"INDArray * INDArray" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) mul 2))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight * input
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 0)
}
"INDArray * Double" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) mul 2))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight * 1.0
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 0)
}
"Double * INDArray" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) mul 2))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
1.0 * weight
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 0)
}
"INDArray / INDArray" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) mul 2))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight / input
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 0)
}
"INDArray / Double" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) mul 2))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight / 1.0
}
trainAndAssertLossAndWeight(myNetwork, weight, expectedWeightSum = 0)
}
"Double / INDArray" in {
val weight = hyperparameters.INDArrayWeight(Nd4j.ones(4, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
1.0 / weight
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 20000) {
myNetwork(Nd4j.ones(4, 4)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(Nd4j.ones(4, 4)).predict.each
loss.sumT should be < 0.5
weight.data.sumT should be > 600.0
}
}
"max(INDArray,Double)" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.max(weight, 0.0)
}
trainAndAssertLossAndWeight(myNetwork, weight, trainTimes = 10, expectedWeightSum = 0)
}
"min(INDArray,Double)" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.min(weight, 100.0)
}
trainAndAssertLossAndWeight(myNetwork, weight, trainTimes = 10, expectedWeightSum = 0)
}
"exp(INDArray)" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.exp(weight)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 50) {
myNetwork(Nd4j.ones(4, 4)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(Nd4j.ones(4, 4)).predict.each
loss.sumT should be < 1.0
weight.data.sumT should be < 1.0
}
}
"log(INDArray)" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.log(weight)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 50) {
myNetwork(Nd4j.ones(4, 4)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(Nd4j.ones(4, 4)).predict.each
loss.sumT should be < 10.0
weight.data.sumT should be < 22.0
}
}
"abs(INDArray)" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.abs(weight)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 10) {
myNetwork(Nd4j.ones(4, 4)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(Nd4j.ones(4, 4)).predict.each
loss.sumT should be < 1.0
weight.data.sumT should be < 1.0
}
}
"INDArray dot INDArray" in {
val weight = hyperparameters.INDArrayWeight((Nd4j.ones(4, 4) * 10))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
def RichINDArray = ??? // Disable org.nd4s.Implicits.RichINDArray
input dot weight
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 10) {
myNetwork(Nd4j.ones(4, 4)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(Nd4j.ones(4, 4)).predict.each
loss.sumT should be(-1920.0)
weight.data.sumT should be(-480.0)
}
}
"INDArray im2col (kernel,stride,padding) --forward" in {
val hyperparameters =
Factory[
CNNs with Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 0.03)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((-(1 to 54).toNDArray).reshape(2, 3, 3, 3))
def myNetwork(kernel: (Int, Int), stride: (Int, Int), padding: (Int, Int)): hyperparameters.INDArrayLayer = {
hyperparameters.im2col(weight, kernel, stride, padding)
}
myNetwork((3, 3), (1, 1), (1, 1)).train.map { result =>
result.sumT should be(-8085.0)
}
}
"INDArray im2col (kernel,stride,padding) --train" in {
val hyperparameters =
Factory[
CNNs with Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 0.01)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(kernel: (Int, Int), stride: (Int, Int), padding: (Int, Int)): hyperparameters.INDArrayLayer = {
hyperparameters.im2col(weight, kernel, stride, padding)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 1000) {
myNetwork((3, 3), (1, 1), (1, 1)).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = (myNetwork((3, 3), (1, 1), (1, 1))).predict.each
loss.sumT should be < 1.0
}
}
"INDArray reshape shapes --forward" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.reshape(dimensions: _*)
}
myNetwork(2, 3, 3, 3).train.map { result =>
result.sumT should be(1431.0)
}
}
"INDArray maxPool poolsize --forward" in {
val hyperparameters =
Factory[
CNNs with Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((1 to 96).toNDArray.reshape(2, 3, 4, 4))
def myNetwork(poolSize: (Int, Int)): hyperparameters.INDArrayLayer = {
hyperparameters.maxPool(weight, poolSize)
}
myNetwork((2, 2)).train.map { result =>
result.sumT should be(1224.0)
}
}
"INDArray maxPool poolsize -- train" in {
val hyperparameters =
Factory[
CNNs with Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((1 to 96).toNDArray.reshape(2, 3, 4, 4))
def myNetwork(poolSize: (Int, Int)): hyperparameters.INDArrayLayer = {
hyperparameters.maxPool(weight, poolSize)
}
val poolSize = (2, 2)
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 700) {
myNetwork(poolSize).train.each
}
}
throwableMonadic[Future] {
task.each
val loss: INDArray = (myNetwork(poolSize)).predict.each
loss.meanT should be < 10.0
}
}
"4D INDArray * 4D INDArray -- forward" in {
val weight = hyperparameters.INDArrayWeight((0 until (1 * 2 * 3 * 4)).toNDArray.reshape(1, 2, 3, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight * input
}
val input = (0 until (1 * 2 * 3 * 4)).toNDArray.reshape(1, 2, 3, 4)
myNetwork(input).train.map { (result) =>
result.meanNumber.doubleValue should be(180.16666666666666667 +- 0.1)
}
}
"4D INDArray * 4D INDArray -- train" in {
val weight = hyperparameters.INDArrayWeight((0 until (1 * 2 * 3 * 4)).toNDArray.reshape(1, 2, 3, 4))
def myNetwork(input: INDArray): hyperparameters.INDArrayLayer = {
weight * input
}
val input = (0 until (1 * 2 * 3 * 4)).toNDArray.reshape(1, 2, 3, 4)
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 100) {
myNetwork(input).train.each
}
}
throwableMonadic[Future] {
task.each
val loss: INDArray = (myNetwork(input)).predict.each
loss.meanNumber.doubleValue should be < 1.0
}
}
"INDArray permute dimensions --forward" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 9))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.permute(dimensions: _*)
}
myNetwork(0, 2, 1).train.map { (result) =>
result.sumT should be(1431.0)
}
}
"conv2d(INDArray, INDArray, INDArray, kernel, stride, padding)" in {
val hyperparameters =
Factory[
CNNs with Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 0.01)
import hyperparameters.implicits._
val weight: hyperparameters.INDArrayWeight = hyperparameters.INDArrayWeight(Nd4j.ones(1, 1, 3, 3))
val bias: hyperparameters.INDArrayWeight = hyperparameters.INDArrayWeight(Nd4j.zeros(1))
def convolution(input: INDArray): hyperparameters.INDArrayLayer = {
hyperparameters.conv2d(input, weight, bias, (3, 3), (1, 1), (1, 1))
}
val expectResult = Array(14.00, 24.00, 30.00, 22.00, 33.00, 54.00, 63.00, 45.00, 57.00, 90.00, 99.00, 69.00, 46.00,
72.00, 78.00, 54.00).toNDArray
.reshape(1, 1, 4, 4)
val input: INDArray = (1 to 16).toNDArray.reshape(1, 1, 4, 4)
convolution(input).train.map { (result) =>
result.eq(expectResult).sumT should be(16)
}
}
"sumT(INDArray)" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(): hyperparameters.DoubleLayer = {
weight.sum
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 54) {
myNetwork().train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork().predict.each
loss should be < 1.0
}
}
"sum(INDArray,dimensions) --2 dimensions" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(6, 9))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.sum(dimensions: _*)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 54) {
myNetwork(0).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(0).predict.each
loss.sumT should be < 1.0
}
}
// Failed due to nd4j bugs in broadcasting. TODO: Try to upgrade nd4j to a new version.
"sum(INDArray,dimensions) --4 dimensions" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.sum(dimensions: _*)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 54) {
myNetwork(0, 1).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(0, 1).predict.each
loss.sumT should be < 1.0
}
}
"mean(INDArray)" in {
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(): hyperparameters.DoubleLayer = {
weight.mean
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 27) {
myNetwork().train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork().predict.each
loss should be < 27.5
}
}
"INDArray reshape shapes --train" in {
val hyperparameters =
Factory[Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 0.01)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 3, 3))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.reshape(dimensions: _*)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 10000) {
myNetwork(2, 3, 3, 3).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = (myNetwork(2, 3, 3, 3)).predict.each
loss.sumT should be < 1.0
}
}
"INDArray permute dimensions --train" in {
val hyperparameters =
Factory[Logging with ImplicitsSingleton with INDArrayTraining with INDArrayLiterals with DoubleLiterals with CumulativeDoubleLayers with Operators with CumulativeINDArrayLayers with FixedLearningRate]
.newInstance(fixedLearningRate = 0.01)
import hyperparameters.implicits._
val weight = hyperparameters.INDArrayWeight((1 to 54).toNDArray.reshape(2, 3, 9))
def myNetwork(dimensions: Int*): hyperparameters.INDArrayLayer = {
weight.permute(dimensions: _*)
}
@monadic[Future]
val task: Future[Unit] = {
for (_ <- 1 to 10000) {
myNetwork(0, 2, 1).train.each
}
}
throwableMonadic[Future] {
task.each
val loss = myNetwork(0, 2, 1).predict.each
loss.sumT should be < 1.0
}
}
}
| izhangzhihao/DeepLearning.scala | plugins-CumulativeINDArrayLayers/src/test/scala-2.11/com/thoughtworks/deeplearning/plugins/CumulativeINDArrayLayersSpec.scala | Scala | apache-2.0 | 28,145 |
package edu.gemini.pit.ui.util
import edu.gemini.pit.ui.util.SimpleToolbar.IconButton
import javax.swing.Icon
import scala.swing.Button
import scala.swing.event.ButtonClicked
object ToolButton {
def apply(icon: Icon, disabledIcon: Icon, tooltipText: String)(f: => Unit) =
new ToolButton(icon, disabledIcon, tooltipText) {
def apply = f
}
}
// A generic tool button
abstract class ToolButton(icon: Icon, disabledIcon: Icon, tooltipText: String) extends Button {
override lazy val peer = new IconButton(icon, disabledIcon)
tooltip = tooltipText
reactions += {
case ButtonClicked(_) => apply()
}
def apply()
}
| arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/util/ToolButton.scala | Scala | bsd-3-clause | 641 |
package jitd.rewrite
import jitd.spec._
object MatchToStatement
{
type VarName = String
type NodeType = String
def unroll(
definition: Definition,
pattern: MatchPattern,
extractName: VarName,
targetExpression: Expression,
addornot:Boolean
):
Seq[ (VarName, NodeType, Expression) ] =
{
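    // Walk the match pattern and emit one (variable name, node type, source expression) triple
    // per matched node; child extraction variables are named <parent>_<field>, and `addornot`
    // selects NodeCast-based access (true) versus plain field subscripts (false).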
pattern match {
case MatchAny(_) => Seq()
case MatchNode(nodeType, fields, _) =>
Seq( (extractName, nodeType, targetExpression) ) ++
fields.zip(definition.node(nodeType).fields).flatMap {
case (fieldPattern, fieldDefinition) =>
unroll(
definition,
fieldPattern,
extractName + "_" + fieldDefinition.name,
            if (addornot)
              WrapNodeRef(NodeCast(nodeType, Var(extractName), fieldDefinition.name))
            else
              WrapNodeRef(NodeSubscript(Var(extractName), fieldDefinition.name)),
            addornot
)
}
}
}
// def unroll(
// definition: Definition,
// pattern: ConstructorPattern,
// extractName: VarName,
// targetExpression: Expression,
// addornot:Boolean
// ):
// Seq[ (VarName, NodeType, Expression) ] =
// {
// pattern match {
// case MatchAny(_) => Seq()
// case MatchNode(nodeType, fields, _) =>
// Seq( (extractName, nodeType, targetExpression) ) ++
// fields.zip(definition.node(nodeType).fields).flatMap {
// case (fieldPattern, fieldDefinition) =>
// unroll(
// definition,
// fieldPattern,
// extractName + "_" + fieldDefinition.name,
// UnWrapHandle(NodeSubscript(Var(extractName), fieldDefinition.name)),
// true
// )
// }
// }
// }
def varMappings(
definition: Definition,
pattern: MatchPattern,
targetName: String
): Map[String, Expression] =
{
val myMapping = pattern.name.toSeq.map { name => name -> Var(targetName) }.toMap
val childMappings =
pattern match {
case MatchAny(_) => Map()
case MatchNode(nodeType, fields, _) =>
fields.zip(definition.node(nodeType).fields).map {
case (MatchAny(None), fieldDefinition) => Map()
case (MatchAny(Some(name)), fieldDefinition) => Map(
name -> NodeSubscript(Var(targetName), fieldDefinition.name)
)
case (fieldPattern:MatchNode, fieldDefinition) =>
varMappings(definition, fieldPattern, targetName + "_" + fieldDefinition.name)
}.fold(Map()){ _ ++ _ }
}
return myMapping ++ childMappings
}
def varMappings(
definition: Definition,
pattern: ConstructorPattern,
targetName: String,
directFieldRef: Option[Expression] = None
): Map[String, Expression] =
{
//println(directFieldRef)
pattern match {
case ConstructExpression(_, None) => Map()
case ConstructExpression(_, Some(name)) =>
Map( name ->
directFieldRef.getOrElse { Var(targetName) }
)
case ConstructNode(nodeType, fields, name) => {
val me = Var(targetName)
val myMapping:Map[String, Expression] =
name match {
case Some(x) => Map(x -> Var(targetName+"_ref"))
case None => Map()
}
fields.zip(definition.node(nodeType).fields).map {
case (fieldPattern, fieldDefinition) =>
varMappings(
definition,
fieldPattern,
targetName+"_"+fieldDefinition.name,
Some(NodeSubscript(me, fieldDefinition.name))
)
}.fold(myMapping) { _ ++ _ }
}
case BeforeConstruct(_, target) => varMappings(definition, target, targetName)
case AfterConstruct(target, _) => varMappings(definition, target, targetName)
}
}
def varMappingsForEmplace(
definition: Definition,
pattern: ConstructorPattern,
targetName: String,
directFieldRef: Expression
)
{
//println(directFieldRef)
// val ret = pattern match {
// case ConstructExpression(_, None) => Seq()
// case ConstructExpression(_, Some(name)) =>
// Seq(Var(targetName))
// case ConstructNode(nodeType, fields, name) => {
// val me = Var(targetName)
// // val typeOfNode = nodeType
// // val myMapping:Map[String, Expression] =
// // name match {
// // case Some(x) => Map(x -> Var(" "))
// // case None => Map()
// // }
// val temp = fields.zip(definition.node(nodeType).fields).map {
// case (fieldPattern, fieldDefinition) =>
// varMappingsForEmplace(
// definition,
// fieldPattern,
// targetName+"_"+fieldDefinition.name,
// NodeSubscript(me, fieldDefinition.name)
// )
// //println(temp)
// }.flatten
// }
// case BeforeConstruct(_, target) => Seq()
// case AfterConstruct(target, _) => Seq()
// }
// println(ret)
// //directFieldRef
// directFieldRef match {
// case ns@NodeSubscript(_,_) => Seq(ns) ++ ret
// // case ns@NodeSubscript(_,"rhs")) => Seq(ns)
// // case ns@NodeSubscript(_,"listptr")) => Seq(ns)
// // case ns@NodeSubscript(_,"data")) => Seq(ns)
// case _ => ret
// }
}
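  /**
   * Compiles a match pattern into code: the triples produced by `unroll` become nested
   * ExtractNode statements (each falling through to `onFail`), and `onMatch` is inlined
   * with the pattern's variable mappings substituted.
   */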
def apply(
definition: Definition,
pattern: MatchPattern,
target: VarName,
handleref : Boolean,
onMatch: Statement,
onFail: Statement
): Statement =
{
val fromMappings = varMappings(definition, pattern, target+"_root")
    val extractionSteps =
      if (handleref) unroll(definition, pattern, target + "_root", Var(target), false)
      else unroll(definition, pattern, target + "_root", WrapNodeRef(Var(target)), false)
//println(InlineVars(onMatch, fromMappings))
return extractionSteps
.foldRight(InlineVars(onMatch, fromMappings)) { (check, accumulator) => {
val (nodeVarName, nodeType, nodeSourceExpression) = check
//println("MTS "+nodeVarName+"||"+nodeType+"||"+nodeSourceExpression)
ExtractNode(nodeVarName, nodeSourceExpression, Seq(nodeType -> accumulator), onFail)
}}
}
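  /**
   * Emits the statements that allocate and wire up the nodes described by a constructor
   * pattern, returning them together with the expression (a node reference) that names the
   * assembled result; Before/After hooks are spliced around the child constructors.
   */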
def constructorWithoutVarMapping(
definition: Definition,
pattern: ConstructorPattern,
target: VarName
): (Statement, Expression) =
{
pattern match {
case ConstructNode(node, fields, nameOption) => {
//println("Construct: "+node)
val nodeDefinition = definition.node(node)
val nodeName = nameOption.getOrElse(target)
//println(nodeName)
val (fieldConstructors, fieldExpressions) =
fields.zip(nodeDefinition.fields).map { case (fieldPattern, fieldDefinition) =>
constructorWithoutVarMapping(definition, fieldPattern, nodeName+"_"+fieldDefinition.name)
}.unzip
(
fieldConstructors.fold(Block(Seq())){ _ ++ _ } ++
Block(
Seq(
Comment(s"Assemble $target as a $node"),
Declare( nodeName, Some(TNode(node)), MakeNode(node, fieldExpressions)),
Declare( nodeName+"_ref", Some(TNodeRef()), WrapNode(Var(nodeName))),
Comment(s"Code to add nodes into sets")
)
),
Var(nodeName+"_ref")
)
}
case AfterConstruct(child, code) => {
val (constructor, accessor) = constructorWithoutVarMapping(definition, child, target)
(constructor ++Comment(s"Handle post-processing for $target") ++ code, accessor)
}
case BeforeConstruct(code, child) => {
val (constructor, accessor) = constructorWithoutVarMapping(definition, child, target)
(Comment(s"Handle pre-processing for $target") ++ code ++ constructor, accessor)
}
case ConstructExpression(expression, _) =>
(Block(Seq()), expression)
}
}
def apply(
definition: Definition,
pattern: ConstructorPattern,
target: VarName
): (Statement, Expression) =
{
//println(pattern)
//println(varMappings(definition, pattern, target))
val (constructor, accessor) = constructorWithoutVarMapping(definition, pattern, target)
//println("MTS"+constructor)
//println(InlineVars(constructor, varMappings(definition, pattern, target)))
(
InlineVars(
Comment(s"BEGIN ASSEMBLING $target") ++
constructor ++
Comment(s"END ASSEMBLING $target as $accessor"),
varMappings(definition, pattern, target)),
// constructor,
accessor
)
}
} | UBOdin/jitd-synthesis | src/main/scala/jitd/rewrite/MatchToStatement.scala | Scala | apache-2.0 | 9,261 |
package net.tomasherman.specus.common.api.plugin.config
/**
* This file is part of Specus.
*
* Specus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Specus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with Specus. If not, see <http://www.gnu.org/licenses/>.
*
*/
/** Plugin related constants. */
abstract class PluginConfig {
  /** Package in which plugin classes are looked up. */
def pkg: String
/** Folder in which plugins can be found. */
def directory: String
/** Plugin jar name ending */
def pluginEnding:String
/** Name of the file containing plugin definitions.*/
def pluginDefinitionFileNameSuffix: String
def definitions: PluginDefinitionsConfig
}
abstract class PluginDefinitionsConfig {
def authorKey:String
def versionKey:String
def identifierKey:String
def pluginClassKey:String
def nameKey:String
def dependenciesKey:String
def dependency_version:String
def dependency_identifier:String
} | tomasherman/specus | common_api/src/main/scala/plugin/config/pluginConfig.scala | Scala | gpl-3.0 | 1,405 |
import at.logic.gapt.algorithms.rewriting.TermReplacement
import at.logic.gapt.expr.fol.reduceHolToFol
import at.logic.gapt.expr.hol.{CNFp, removeAllQuantifiers, CNFn, instantiate}
import at.logic.gapt.expr._
import at.logic.gapt.formats.tip.TipSmtParser
import at.logic.gapt.grammars._
import at.logic.gapt.proofs.expansionTrees.{InstanceTermEncoding, extractInstances}
import at.logic.gapt.proofs.lkNew.{skolemize, LKToExpansionProof}
import at.logic.gapt.provers.inductionProver.{hSolveQBUP, qbupForRecSchem}
import at.logic.gapt.provers.prover9.Prover9
import at.logic.gapt.provers.veriT.VeriT
val tipProblem = TipSmtParser parse """
(declare-sort sk_a 0)
(declare-datatypes ()
((list (nil) (cons (head sk_a) (tail list)))))
(define-fun-rec
qrev
((x list) (y list)) list
(match x
(case nil y)
(case (cons z xs) (qrev xs (cons z y)))))
(assert-not (forall ((x list)) (= (qrev (qrev x nil) nil) x)))
(check-sat)
"""
val sequent = tipProblem toSequent
val list = TBase("list")
val sk_a = TBase("sk_a")
val nil = Const("nil", list)
val cons = Const("cons", sk_a -> (list -> list))
def mkList(i: Int) = (0 until i).foldRight[LambdaExpression](nil) { (j, l) => cons(Const(s"a$j", sk_a), l) }
val instances = 0 to 2 map mkList
// Compute many-sorted expansion sequents
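// Each instance sequent is erased to untyped first-order logic so that Prover9 can prove it;
// the proof is converted to an expansion proof and the erasure is reversed afterwards.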
val instanceProofs = instances map { inst =>
val instanceSequent = sequent.map(identity, instantiate(_, inst))
val erasure = (constants(instanceSequent) ++ variables(instanceSequent)).zipWithIndex.flatMap {
case (EqC(_), _) => None
case (c@NonLogicalConstant(name, FunctionType(To, argTypes)), i) =>
Some(c -> FOLAtomHead(s"P_${name}_$i", argTypes.size))
case (c@NonLogicalConstant(name, FunctionType(_, argTypes)), i) =>
Some(c -> FOLFunctionHead(s"f_${name}_$i", argTypes.size))
case (v@Var(name, TBase(ty)), i) =>
Some(v -> FOLVar(s"x_${name}_${ty}_$i"))
}.toMap[LambdaExpression, LambdaExpression]
val erasedInstanceSequent = instanceSequent map { TermReplacement(_, erasure) }
val Some(erasedProof) = Prover9 getLKProof erasedInstanceSequent
val erasedExpansion = LKToExpansionProof(erasedProof)
val reifiedExpansion = erasedExpansion map { TermReplacement(_, erasure.map(_.swap)) }
inst -> reifiedExpansion
}
instanceProofs foreach { case (inst, es) =>
println(s"Instances for x = $inst:")
extractInstances(es).map(-_, identity).elements foreach println
println()
}
val x = Var("x", list)
val encoding = InstanceTermEncoding(sequent.map(identity, instantiate(_, x)))
val A = Const("A", list -> encoding.instanceTermType)
val G = Const("G", list -> (list -> encoding.instanceTermType))
val y = Var("y", sk_a)
val w = Var("w", list)
val w2 = Var("w2", list)
val z = Var("z", encoding.instanceTermType)
val template = RecSchemTemplate(A,
A(x) -> G(x, w2), A(x) -> z,
G(cons(y, x), w) -> G(x, w2),
G(cons(y, x), w) -> z,
G(nil, w) -> z)
val targets = for ((inst, es) <- instanceProofs; term <- encoding encode es) yield A(inst) -> term
val stableRS = template.stableRecSchem(targets.toSet)
// FIXME: the class of rs w/o skolem symbols is not closed under the rewriting that stableTerms() expects :-/
val stableRSWithoutSkolemSymbols =
RecursionScheme(stableRS.axiom, stableRS.nonTerminals,
stableRS.rules filterNot { case Rule(from, to) =>
constants(to) exists { _.exptype == sk_a }
})
//println(stableRSWithoutSkolemSymbols)
val rs = minimizeRecursionScheme(stableRSWithoutSkolemSymbols, targets, template.targetFilter,
weight = rule => expressionSize(rule.lhs === rule.rhs))
println(s"Minimized recursion scheme:\\n$rs\\n")
val logicalRS = encoding.decode(rs.copy(rules = rs.rules flatMap {
case r@Rule(lhs, rhs) if lhs == G(x, w) =>
Seq(r(Substitution(x -> cons(y,x))), r(Substitution(x -> nil)))
case r => Seq(r)
}))
println(s"Logical recursion scheme:\\n$logicalRS\\n")
val inst = mkList(8)
val lang = logicalRS parametricLanguage inst map { _.asInstanceOf[HOLFormula] }
println(s"Validity for instance x = $inst:")
println(VeriT isValid reduceHolToFol(Or(lang toSeq)))
println()
// FIXME: currently learns datatype from recursion scheme :-/
val qbup @ Ex(x_G, qbupMatrix) = qbupForRecSchem(logicalRS)
println(s"QBUP:\\n$qbup\\n")
println(s"Canonical solution at G(${mkList(3)},w):")
val G_ = logicalRS.nonTerminals.find(_.name == "G").get
val canSol = And(logicalRS generatedTerms G_(mkList(3),w) map { -_ })
CNFp.toClauseList(canSol) foreach println
println()
val Some(solution) = hSolveQBUP(qbupMatrix, x_G(mkList(3), w), canSol)
println()
val formula = BetaReduction.betaNormalize(instantiate(qbup, solution))
println(s"Solution: $solution\\n")
println(VeriT isValid reduceHolToFol(skolemize(formula)))
| loewenheim/gapt | examples/induction/prod_prop_31.scala | Scala | gpl-3.0 | 4,728 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.data.webhooks.exampleform
import io.prediction.data.webhooks.ConnectorTestUtil
import org.specs2.mutable._
/** Test the ExampleFormConnector */
class ExampleFormConnectorSpec extends Specification with ConnectorTestUtil {
"ExampleFormConnector" should {
"convert userAction to Event JSON" in {
// webhooks input
val userAction = Map(
"type" -> "userAction",
"userId" -> "as34smg4",
"event" -> "do_something",
"context[ip]" -> "24.5.68.47", // optional
"context[prop1]" -> "2.345", // optional
"context[prop2]" -> "value1", // optional
"anotherProperty1" -> "100",
"anotherProperty2"-> "optional1", // optional
"timestamp" -> "2015-01-02T00:30:12.984Z"
)
// expected converted Event JSON
val expected = """
{
"event": "do_something",
"entityType": "user",
"entityId": "as34smg4",
"properties": {
"context": {
"ip": "24.5.68.47",
"prop1": 2.345
"prop2": "value1"
},
"anotherProperty1": 100,
"anotherProperty2": "optional1"
}
"eventTime": "2015-01-02T00:30:12.984Z"
}
"""
check(ExampleFormConnector, userAction, expected)
}
"convert userAction without optional fields to Event JSON" in {
// webhooks input
val userAction = Map(
"type" -> "userAction",
"userId" -> "as34smg4",
"event" -> "do_something",
"anotherProperty1" -> "100",
"timestamp" -> "2015-01-02T00:30:12.984Z"
)
// expected converted Event JSON
val expected = """
{
"event": "do_something",
"entityType": "user",
"entityId": "as34smg4",
"properties": {
"anotherProperty1": 100,
}
"eventTime": "2015-01-02T00:30:12.984Z"
}
"""
check(ExampleFormConnector, userAction, expected)
}
"convert userActionItem to Event JSON" in {
// webhooks input
val userActionItem = Map(
"type" -> "userActionItem",
"userId" -> "as34smg4",
"event" -> "do_something_on",
"itemId" -> "kfjd312bc",
"context[ip]" -> "1.23.4.56",
"context[prop1]" -> "2.345",
"context[prop2]" -> "value1",
"anotherPropertyA" -> "4.567", // optional
"anotherPropertyB" -> "false", // optional
"timestamp" -> "2015-01-15T04:20:23.567Z"
)
// expected converted Event JSON
val expected = """
{
"event": "do_something_on",
"entityType": "user",
"entityId": "as34smg4",
"targetEntityType": "item",
"targetEntityId": "kfjd312bc"
"properties": {
"context": {
"ip": "1.23.4.56",
"prop1": 2.345
"prop2": "value1"
},
"anotherPropertyA": 4.567
"anotherPropertyB": false
}
"eventTime": "2015-01-15T04:20:23.567Z"
}
"""
check(ExampleFormConnector, userActionItem, expected)
}
"convert userActionItem without optional fields to Event JSON" in {
// webhooks input
val userActionItem = Map(
"type" -> "userActionItem",
"userId" -> "as34smg4",
"event" -> "do_something_on",
"itemId" -> "kfjd312bc",
"context[ip]" -> "1.23.4.56",
"context[prop1]" -> "2.345",
"context[prop2]" -> "value1",
"timestamp" -> "2015-01-15T04:20:23.567Z"
)
// expected converted Event JSON
val expected = """
{
"event": "do_something_on",
"entityType": "user",
"entityId": "as34smg4",
"targetEntityType": "item",
"targetEntityId": "kfjd312bc"
"properties": {
"context": {
"ip": "1.23.4.56",
"prop1": 2.345
"prop2": "value1"
}
}
"eventTime": "2015-01-15T04:20:23.567Z"
}
"""
check(ExampleFormConnector, userActionItem, expected)
}
}
}
| beni55/PredictionIO | data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala | Scala | apache-2.0 | 4,823 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand.{HBaseParams, ToggleRemoteFilterParam}
import org.locationtech.geomesa.hbase.tools.stats.HBaseStatsCountCommand.HBaseStatsCountParams
import org.locationtech.geomesa.tools.stats.{StatsCountCommand, StatsCountParams}
class HBaseStatsCountCommand extends StatsCountCommand[HBaseDataStore] with HBaseDataStoreCommand {
override val params = new HBaseStatsCountParams
}
object HBaseStatsCountCommand {
@Parameters(commandDescription = "Estimate or calculate feature counts in a GeoMesa feature type")
class HBaseStatsCountParams extends StatsCountParams with HBaseParams with ToggleRemoteFilterParam
}
| ddseapy/geomesa | geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/stats/HBaseStatsCountCommand.scala | Scala | apache-2.0 | 1,352 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io._
import java.nio.charset.StandardCharsets
import scala.collection.mutable.ArrayBuffer
import com.google.common.io.ByteStreams
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.api.r.RUtils
import org.apache.spark.deploy.SparkSubmit._
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
import org.apache.spark.internal.config._
import org.apache.spark.internal.Logging
import org.apache.spark.TestUtils.JavaSourceFromString
import org.apache.spark.util.{ResetSystemProperties, Utils}
// Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
// of properties that needed to be cleared after tests.
class SparkSubmitSuite
extends SparkFunSuite
with Matchers
with BeforeAndAfterEach
with ResetSystemProperties
with Timeouts {
override def beforeEach() {
super.beforeEach()
System.setProperty("spark.testing", "true")
}
private val noOpOutputStream = new OutputStream {
def write(b: Int) = {}
}
/** Simple PrintStream that reads data into a buffer */
private class BufferPrintStream extends PrintStream(noOpOutputStream) {
var lineBuffer = ArrayBuffer[String]()
// scalastyle:off println
override def println(line: String) {
lineBuffer += line
}
// scalastyle:on println
}
/** Returns true if the script exits and the given search string is printed. */
private def testPrematureExit(input: Array[String], searchString: String) = {
val printStream = new BufferPrintStream()
SparkSubmit.printStream = printStream
@volatile var exitedCleanly = false
SparkSubmit.exitFn = (_) => exitedCleanly = true
val thread = new Thread {
override def run() = try {
SparkSubmit.main(input)
} catch {
// If exceptions occur after the "exit" has happened, fine to ignore them.
// These represent code paths not reachable during normal execution.
case e: Exception => if (!exitedCleanly) throw e
}
}
thread.start()
thread.join()
    val joined = printStream.lineBuffer.mkString("\n")
if (!joined.contains(searchString)) {
fail(s"Search string '$searchString' not found in $joined")
}
}
// scalastyle:off println
test("prints usage on empty input") {
testPrematureExit(Array.empty[String], "Usage: spark-submit")
}
test("prints usage with only --help") {
testPrematureExit(Array("--help"), "Usage: spark-submit")
}
test("prints error with unrecognized options") {
testPrematureExit(Array("--blarg"), "Unrecognized option '--blarg'")
testPrematureExit(Array("-bleg"), "Unrecognized option '-bleg'")
}
test("handle binary specified but not class") {
testPrematureExit(Array("foo.jar"), "No main class")
}
test("handles arguments with --key=val") {
val clArgs = Seq(
"--jars=one.jar,two.jar,three.jar",
"--name=myApp")
val appArgs = new SparkSubmitArguments(clArgs)
appArgs.jars should include regex (".*one.jar,.*two.jar,.*three.jar")
appArgs.name should be ("myApp")
}
test("handles arguments to user program") {
val clArgs = Seq(
"--name", "myApp",
"--class", "Foo",
"userjar.jar",
"some",
"--weird", "args")
val appArgs = new SparkSubmitArguments(clArgs)
appArgs.childArgs should be (Seq("some", "--weird", "args"))
}
test("handles arguments to user program with name collision") {
val clArgs = Seq(
"--name", "myApp",
"--class", "Foo",
"userjar.jar",
"--master", "local",
"some",
"--weird", "args")
val appArgs = new SparkSubmitArguments(clArgs)
appArgs.childArgs should be (Seq("--master", "local", "some", "--weird", "args"))
}
test("specify deploy mode through configuration") {
val clArgs = Seq(
"--master", "yarn",
"--conf", "spark.submit.deployMode=client",
"--class", "org.SomeClass",
"thejar.jar"
)
val appArgs = new SparkSubmitArguments(clArgs)
val (_, _, sysProps, _) = prepareSubmitEnvironment(appArgs)
appArgs.deployMode should be ("client")
sysProps("spark.submit.deployMode") should be ("client")
// Both cmd line and configuration are specified, cmdline option takes the priority
val clArgs1 = Seq(
"--master", "yarn",
"--deploy-mode", "cluster",
"--conf", "spark.submit.deployMode=client",
"-class", "org.SomeClass",
"thejar.jar"
)
val appArgs1 = new SparkSubmitArguments(clArgs1)
val (_, _, sysProps1, _) = prepareSubmitEnvironment(appArgs1)
appArgs1.deployMode should be ("cluster")
sysProps1("spark.submit.deployMode") should be ("cluster")
// Neither cmdline nor configuration are specified, client mode is the default choice
val clArgs2 = Seq(
"--master", "yarn",
"--class", "org.SomeClass",
"thejar.jar"
)
val appArgs2 = new SparkSubmitArguments(clArgs2)
appArgs2.deployMode should be (null)
val (_, _, sysProps2, _) = prepareSubmitEnvironment(appArgs2)
appArgs2.deployMode should be ("client")
sysProps2("spark.submit.deployMode") should be ("client")
}
test("handles YARN cluster mode") {
val clArgs = Seq(
"--deploy-mode", "cluster",
"--master", "yarn",
"--executor-memory", "5g",
"--executor-cores", "5",
"--class", "org.SomeClass",
"--jars", "one.jar,two.jar,three.jar",
"--driver-memory", "4g",
"--queue", "thequeue",
"--files", "file1.txt,file2.txt",
"--archives", "archive1.txt,archive2.txt",
"--num-executors", "6",
"--name", "beauty",
"--conf", "spark.ui.enabled=false",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
val childArgsStr = childArgs.mkString(" ")
childArgsStr should include ("--class org.SomeClass")
childArgsStr should include ("--arg arg1 --arg arg2")
childArgsStr should include regex ("--jar .*thejar.jar")
mainClass should be ("org.apache.spark.deploy.yarn.Client")
// In yarn cluster mode, also adding jars to classpath
classpath(0) should endWith ("thejar.jar")
classpath(1) should endWith ("one.jar")
classpath(2) should endWith ("two.jar")
classpath(3) should endWith ("three.jar")
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.driver.memory") should be ("4g")
sysProps("spark.executor.cores") should be ("5")
sysProps("spark.yarn.queue") should be ("thequeue")
sysProps("spark.yarn.dist.jars") should include regex (".*one.jar,.*two.jar,.*three.jar")
sysProps("spark.yarn.dist.files") should include regex (".*file1.txt,.*file2.txt")
sysProps("spark.yarn.dist.archives") should include regex (".*archive1.txt,.*archive2.txt")
sysProps("spark.app.name") should be ("beauty")
sysProps("spark.ui.enabled") should be ("false")
sysProps("SPARK_SUBMIT") should be ("true")
}
test("handles YARN client mode") {
val clArgs = Seq(
"--deploy-mode", "client",
"--master", "yarn",
"--executor-memory", "5g",
"--executor-cores", "5",
"--class", "org.SomeClass",
"--jars", "one.jar,two.jar,three.jar",
"--driver-memory", "4g",
"--queue", "thequeue",
"--files", "file1.txt,file2.txt",
"--archives", "archive1.txt,archive2.txt",
"--num-executors", "6",
"--name", "trill",
"--conf", "spark.ui.enabled=false",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
childArgs.mkString(" ") should be ("arg1 arg2")
mainClass should be ("org.SomeClass")
classpath should have length (4)
classpath(0) should endWith ("thejar.jar")
classpath(1) should endWith ("one.jar")
classpath(2) should endWith ("two.jar")
classpath(3) should endWith ("three.jar")
sysProps("spark.app.name") should be ("trill")
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.executor.cores") should be ("5")
sysProps("spark.yarn.queue") should be ("thequeue")
sysProps("spark.executor.instances") should be ("6")
sysProps("spark.yarn.dist.files") should include regex (".*file1.txt,.*file2.txt")
sysProps("spark.yarn.dist.archives") should include regex (".*archive1.txt,.*archive2.txt")
sysProps("spark.yarn.dist.jars") should include
regex (".*one.jar,.*two.jar,.*three.jar,.*thejar.jar")
sysProps("SPARK_SUBMIT") should be ("true")
sysProps("spark.ui.enabled") should be ("false")
}
test("handles standalone cluster mode") {
testStandaloneCluster(useRest = true)
}
test("handles legacy standalone cluster mode") {
testStandaloneCluster(useRest = false)
}
/**
* Test whether the launch environment is correctly set up in standalone cluster mode.
* @param useRest whether to use the REST submission gateway introduced in Spark 1.3
*/
private def testStandaloneCluster(useRest: Boolean): Unit = {
val clArgs = Seq(
"--deploy-mode", "cluster",
"--master", "spark://h:p",
"--class", "org.SomeClass",
"--supervise",
"--driver-memory", "4g",
"--driver-cores", "5",
"--conf", "spark.ui.enabled=false",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
appArgs.useRest = useRest
val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
val childArgsStr = childArgs.mkString(" ")
if (useRest) {
childArgsStr should endWith ("thejar.jar org.SomeClass arg1 arg2")
mainClass should be ("org.apache.spark.deploy.rest.RestSubmissionClient")
} else {
childArgsStr should startWith ("--supervise --memory 4g --cores 5")
childArgsStr should include regex "launch spark://h:p .*thejar.jar org.SomeClass arg1 arg2"
mainClass should be ("org.apache.spark.deploy.Client")
}
classpath should have size 0
sysProps should have size 9
sysProps.keys should contain ("SPARK_SUBMIT")
sysProps.keys should contain ("spark.master")
sysProps.keys should contain ("spark.app.name")
sysProps.keys should contain ("spark.jars")
sysProps.keys should contain ("spark.driver.memory")
sysProps.keys should contain ("spark.driver.cores")
sysProps.keys should contain ("spark.driver.supervise")
sysProps.keys should contain ("spark.ui.enabled")
sysProps.keys should contain ("spark.submit.deployMode")
sysProps("spark.ui.enabled") should be ("false")
}
test("handles standalone client mode") {
val clArgs = Seq(
"--deploy-mode", "client",
"--master", "spark://h:p",
"--executor-memory", "5g",
"--total-executor-cores", "5",
"--class", "org.SomeClass",
"--driver-memory", "4g",
"--conf", "spark.ui.enabled=false",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
childArgs.mkString(" ") should be ("arg1 arg2")
mainClass should be ("org.SomeClass")
classpath should have length (1)
classpath(0) should endWith ("thejar.jar")
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.cores.max") should be ("5")
sysProps("spark.ui.enabled") should be ("false")
}
test("handles mesos client mode") {
val clArgs = Seq(
"--deploy-mode", "client",
"--master", "mesos://h:p",
"--executor-memory", "5g",
"--total-executor-cores", "5",
"--class", "org.SomeClass",
"--driver-memory", "4g",
"--conf", "spark.ui.enabled=false",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
val (childArgs, classpath, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
childArgs.mkString(" ") should be ("arg1 arg2")
mainClass should be ("org.SomeClass")
classpath should have length (1)
classpath(0) should endWith ("thejar.jar")
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.cores.max") should be ("5")
sysProps("spark.ui.enabled") should be ("false")
}
test("handles confs with flag equivalents") {
val clArgs = Seq(
"--deploy-mode", "cluster",
"--executor-memory", "5g",
"--class", "org.SomeClass",
"--conf", "spark.executor.memory=4g",
"--conf", "spark.master=yarn",
"thejar.jar",
"arg1", "arg2")
val appArgs = new SparkSubmitArguments(clArgs)
val (_, _, sysProps, mainClass) = prepareSubmitEnvironment(appArgs)
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.master") should be ("yarn")
sysProps("spark.submit.deployMode") should be ("cluster")
mainClass should be ("org.apache.spark.deploy.yarn.Client")
}
test("launch simple application with spark-submit") {
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val args = Seq(
"--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
unusedJar.toString)
runSparkSubmit(args)
}
test("includes jars passed in through --jars") {
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
val jarsString = Seq(jar1, jar2).map(j => j.toString).mkString(",")
val args = Seq(
"--class", JarCreationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local-cluster[2,1,1024]",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--jars", jarsString,
unusedJar.toString, "SparkSubmitClassA", "SparkSubmitClassB")
runSparkSubmit(args)
}
// SPARK-7287
test("includes jars passed in through --packages") {
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
IvyTestUtils.withRepository(main, Some(dep.toString), None) { repo =>
val args = Seq(
"--class", JarCreationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local-cluster[2,1,1024]",
"--packages", Seq(main, dep).mkString(","),
"--repositories", repo,
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
unusedJar.toString,
"my.great.lib.MyLib", "my.great.dep.MyLib")
runSparkSubmit(args)
}
}
// TODO(SPARK-9603): Building a package is flaky on Jenkins Maven builds.
// See https://gist.github.com/shivaram/3a2fecce60768a603dac for a error log
ignore("correctly builds R packages included in a jar with --packages") {
assume(RUtils.isRInstalled, "R isn't installed on this machine.")
// Check if the SparkR package is installed
assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
val rScriptDir =
Seq(sparkHome, "R", "pkg", "inst", "tests", "packageInAJarTest.R").mkString(File.separator)
assert(new File(rScriptDir).exists)
IvyTestUtils.withRepository(main, None, None, withR = true) { repo =>
val args = Seq(
"--name", "testApp",
"--master", "local-cluster[2,1,1024]",
"--packages", main.toString,
"--repositories", repo,
"--verbose",
"--conf", "spark.ui.enabled=false",
rScriptDir)
runSparkSubmit(args)
}
}
test("include an external JAR in SparkR") {
assume(RUtils.isRInstalled, "R isn't installed on this machine.")
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
// Check if the SparkR package is installed
assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
val rScriptDir =
Seq(sparkHome, "R", "pkg", "inst", "tests", "testthat", "jarTest.R").mkString(File.separator)
assert(new File(rScriptDir).exists)
// compile a small jar containing a class that will be called from R code.
val tempDir = Utils.createTempDir()
val srcDir = new File(tempDir, "sparkrtest")
srcDir.mkdirs()
val excSource = new JavaSourceFromString(new File(srcDir, "DummyClass").getAbsolutePath,
"""package sparkrtest;
|
|public class DummyClass implements java.io.Serializable {
| public static String helloWorld(String arg) { return "Hello " + arg; }
| public static int addStuff(int arg1, int arg2) { return arg1 + arg2; }
|}
""".stripMargin)
val excFile = TestUtils.createCompiledClass("DummyClass", srcDir, excSource, Seq.empty)
val jarFile = new File(tempDir, "sparkRTestJar-%s.jar".format(System.currentTimeMillis()))
val jarURL = TestUtils.createJar(Seq(excFile), jarFile, directoryPrefix = Some("sparkrtest"))
val args = Seq(
"--name", "testApp",
"--master", "local",
"--jars", jarURL.toString,
"--verbose",
"--conf", "spark.ui.enabled=false",
rScriptDir)
runSparkSubmit(args)
}
test("resolves command line argument paths correctly") {
val jars = "/jar1,/jar2" // --jars
val files = "hdfs:/file1,file2" // --files
val archives = "file:/archive1,archive2" // --archives
val pyFiles = "py-file1,py-file2" // --py-files
// Test jars and files
val clArgs = Seq(
"--master", "local",
"--class", "org.SomeClass",
"--jars", jars,
"--files", files,
"thejar.jar")
val appArgs = new SparkSubmitArguments(clArgs)
val sysProps = SparkSubmit.prepareSubmitEnvironment(appArgs)._3
appArgs.jars should be (Utils.resolveURIs(jars))
appArgs.files should be (Utils.resolveURIs(files))
sysProps("spark.jars") should be (Utils.resolveURIs(jars + ",thejar.jar"))
sysProps("spark.files") should be (Utils.resolveURIs(files))
// Test files and archives (Yarn)
val clArgs2 = Seq(
"--master", "yarn",
"--class", "org.SomeClass",
"--files", files,
"--archives", archives,
"thejar.jar"
)
val appArgs2 = new SparkSubmitArguments(clArgs2)
val sysProps2 = SparkSubmit.prepareSubmitEnvironment(appArgs2)._3
appArgs2.files should be (Utils.resolveURIs(files))
appArgs2.archives should be (Utils.resolveURIs(archives))
sysProps2("spark.yarn.dist.files") should be (Utils.resolveURIs(files))
sysProps2("spark.yarn.dist.archives") should be (Utils.resolveURIs(archives))
// Test python files
val clArgs3 = Seq(
"--master", "local",
"--py-files", pyFiles,
"--conf", "spark.pyspark.driver.python=python3.4",
"--conf", "spark.pyspark.python=python3.5",
"mister.py"
)
val appArgs3 = new SparkSubmitArguments(clArgs3)
val sysProps3 = SparkSubmit.prepareSubmitEnvironment(appArgs3)._3
appArgs3.pyFiles should be (Utils.resolveURIs(pyFiles))
sysProps3("spark.submit.pyFiles") should be (
PythonRunner.formatPaths(Utils.resolveURIs(pyFiles)).mkString(","))
sysProps3(PYSPARK_DRIVER_PYTHON.key) should be ("python3.4")
sysProps3(PYSPARK_PYTHON.key) should be ("python3.5")
}
test("resolves config paths correctly") {
val jars = "/jar1,/jar2" // spark.jars
val files = "hdfs:/file1,file2" // spark.files / spark.yarn.dist.files
val archives = "file:/archive1,archive2" // spark.yarn.dist.archives
val pyFiles = "py-file1,py-file2" // spark.submit.pyFiles
val tmpDir = Utils.createTempDir()
// Test jars and files
val f1 = File.createTempFile("test-submit-jars-files", "", tmpDir)
val writer1 = new PrintWriter(f1)
writer1.println("spark.jars " + jars)
writer1.println("spark.files " + files)
writer1.close()
val clArgs = Seq(
"--master", "local",
"--class", "org.SomeClass",
"--properties-file", f1.getPath,
"thejar.jar"
)
val appArgs = new SparkSubmitArguments(clArgs)
val sysProps = SparkSubmit.prepareSubmitEnvironment(appArgs)._3
sysProps("spark.jars") should be(Utils.resolveURIs(jars + ",thejar.jar"))
sysProps("spark.files") should be(Utils.resolveURIs(files))
// Test files and archives (Yarn)
val f2 = File.createTempFile("test-submit-files-archives", "", tmpDir)
val writer2 = new PrintWriter(f2)
writer2.println("spark.yarn.dist.files " + files)
writer2.println("spark.yarn.dist.archives " + archives)
writer2.close()
val clArgs2 = Seq(
"--master", "yarn",
"--class", "org.SomeClass",
"--properties-file", f2.getPath,
"thejar.jar"
)
val appArgs2 = new SparkSubmitArguments(clArgs2)
val sysProps2 = SparkSubmit.prepareSubmitEnvironment(appArgs2)._3
sysProps2("spark.yarn.dist.files") should be(Utils.resolveURIs(files))
sysProps2("spark.yarn.dist.archives") should be(Utils.resolveURIs(archives))
// Test python files
val f3 = File.createTempFile("test-submit-python-files", "", tmpDir)
val writer3 = new PrintWriter(f3)
writer3.println("spark.submit.pyFiles " + pyFiles)
writer3.close()
val clArgs3 = Seq(
"--master", "local",
"--properties-file", f3.getPath,
"mister.py"
)
val appArgs3 = new SparkSubmitArguments(clArgs3)
val sysProps3 = SparkSubmit.prepareSubmitEnvironment(appArgs3)._3
sysProps3("spark.submit.pyFiles") should be(
PythonRunner.formatPaths(Utils.resolveURIs(pyFiles)).mkString(","))
// Test remote python files
val f4 = File.createTempFile("test-submit-remote-python-files", "", tmpDir)
val writer4 = new PrintWriter(f4)
val remotePyFiles = "hdfs:///tmp/file1.py,hdfs:///tmp/file2.py"
writer4.println("spark.submit.pyFiles " + remotePyFiles)
writer4.close()
val clArgs4 = Seq(
"--master", "yarn",
"--deploy-mode", "cluster",
"--properties-file", f4.getPath,
"hdfs:///tmp/mister.py"
)
val appArgs4 = new SparkSubmitArguments(clArgs4)
val sysProps4 = SparkSubmit.prepareSubmitEnvironment(appArgs4)._3
// Should not format python path for yarn cluster mode
sysProps4("spark.submit.pyFiles") should be(
Utils.resolveURIs(remotePyFiles)
)
}
test("user classpath first in driver") {
val systemJar = TestUtils.createJarWithFiles(Map("test.resource" -> "SYSTEM"))
val userJar = TestUtils.createJarWithFiles(Map("test.resource" -> "USER"))
val args = Seq(
"--class", UserClasspathFirstTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local",
"--conf", "spark.driver.extraClassPath=" + systemJar,
"--conf", "spark.driver.userClassPathFirst=true",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
userJar.toString)
runSparkSubmit(args)
}
test("SPARK_CONF_DIR overrides spark-defaults.conf") {
forConfDir(Map("spark.executor.memory" -> "2.3g")) { path =>
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val args = Seq(
"--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
"--name", "testApp",
"--master", "local",
unusedJar.toString)
val appArgs = new SparkSubmitArguments(args, Map("SPARK_CONF_DIR" -> path))
assert(appArgs.propertiesFile != null)
assert(appArgs.propertiesFile.startsWith(path))
appArgs.executorMemory should be ("2.3g")
}
}
test("comma separated list of files are unioned correctly") {
val left = Option("/tmp/a.jar,/tmp/b.jar")
val right = Option("/tmp/c.jar,/tmp/a.jar")
val emptyString = Option("")
Utils.unionFileLists(left, right) should be (Set("/tmp/a.jar", "/tmp/b.jar", "/tmp/c.jar"))
Utils.unionFileLists(emptyString, emptyString) should be (Set.empty)
Utils.unionFileLists(Option("/tmp/a.jar"), emptyString) should be (Set("/tmp/a.jar"))
Utils.unionFileLists(emptyString, Option("/tmp/a.jar")) should be (Set("/tmp/a.jar"))
Utils.unionFileLists(None, Option("/tmp/a.jar")) should be (Set("/tmp/a.jar"))
Utils.unionFileLists(Option("/tmp/a.jar"), None) should be (Set("/tmp/a.jar"))
}
// scalastyle:on println
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
private def runSparkSubmit(args: Seq[String]): Unit = {
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
val sparkSubmitFile = if (Utils.isWindows) {
new File(s"$sparkHome\\\\bin\\\\spark-submit.cmd")
} else {
new File(s"$sparkHome/bin/spark-submit")
}
val process = Utils.executeCommand(
Seq(sparkSubmitFile.getCanonicalPath) ++ args,
new File(sparkHome),
Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
try {
val exitCode = failAfter(60 seconds) { process.waitFor() }
if (exitCode != 0) {
fail(s"Process returned with exit code $exitCode. See the log4j logs for more detail.")
}
} finally {
// Ensure we still kill the process in case it timed out
process.destroy()
}
}
private def forConfDir(defaults: Map[String, String]) (f: String => Unit) = {
val tmpDir = Utils.createTempDir()
val defaultsConf = new File(tmpDir.getAbsolutePath, "spark-defaults.conf")
val writer = new OutputStreamWriter(new FileOutputStream(defaultsConf), StandardCharsets.UTF_8)
    for ((key, value) <- defaults) writer.write(s"$key $value\n")
writer.close()
try {
f(tmpDir.getAbsolutePath)
} finally {
Utils.deleteRecursively(tmpDir)
}
}
}
object JarCreationTest extends Logging {
def main(args: Array[String]) {
Utils.configTestLog4j("INFO")
val conf = new SparkConf()
val sc = new SparkContext(conf)
val result = sc.makeRDD(1 to 100, 10).mapPartitions { x =>
var exception: String = null
try {
Utils.classForName(args(0))
Utils.classForName(args(1))
} catch {
case t: Throwable =>
          exception = t + "\n" + Utils.exceptionString(t)
          exception = exception.replaceAll("\n", "\n\t")
}
Option(exception).toSeq.iterator
}.collect()
if (result.nonEmpty) {
throw new Exception("Could not load user class from jar:\\n" + result(0))
}
sc.stop()
}
}
object SimpleApplicationTest {
def main(args: Array[String]) {
Utils.configTestLog4j("INFO")
val conf = new SparkConf()
val sc = new SparkContext(conf)
val configs = Seq("spark.master", "spark.app.name")
for (config <- configs) {
val masterValue = conf.get(config)
val executorValues = sc
.makeRDD(1 to 100, 10)
.map(x => SparkEnv.get.conf.get(config))
.collect()
.distinct
if (executorValues.size != 1) {
throw new SparkException(s"Inconsistent values for $config: $executorValues")
}
val executorValue = executorValues(0)
if (executorValue != masterValue) {
throw new SparkException(
s"Master had $config=$masterValue but executor had $config=$executorValue")
}
}
sc.stop()
}
}
object UserClasspathFirstTest {
def main(args: Array[String]) {
val ccl = Thread.currentThread().getContextClassLoader()
val resource = ccl.getResourceAsStream("test.resource")
val bytes = ByteStreams.toByteArray(resource)
val contents = new String(bytes, 0, bytes.length, StandardCharsets.UTF_8)
if (contents != "USER") {
throw new SparkException("Should have read user resource, but instead read: " + contents)
}
}
}
| big-pegasus/spark | core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala | Scala | apache-2.0 | 29,020 |
package io.scalac.frees.login.obsolete
import freestyle.free
import io.scalac.frees.login.algebras.GitHubData
import io.scalac.frees.login.types._
sealed trait UserInsertionResult
case class UserInserted(id: UserId) extends UserInsertionResult
case object AlreadyExists extends UserInsertionResult
case class DBFailure(err: Throwable) extends UserInsertionResult
/**
* Free algebra for user login and registration.
* It was replaced by `DoobiePersistence` algebra build on top of
* `freestyle.doobie.DoobieM` for low level operations.
*/
@free trait Database {
def insertCredentialsUser(email: UserEmail, hash: PasswordHash): FS[UserInsertionResult]
def insertGitHubUser(ghData: GitHubData): FS[UserInsertionResult]
def getUserByEmail(email: UserEmail): FS[Option[UserId]]
def getPassword(email: UserEmail): FS[Option[PasswordHash]]
def getUserByGitHubId(ghId: GitHubId): FS[Option[UserId]]
}
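// Usage sketch (hypothetical, not part of this file): programs are written against the algebra
// and interpreted later with a handler, e.g. a method taking an implicit `Database[F]` can chain
// `db.getUserByEmail(email)` with `db.insertCredentialsUser(email, hash)` in a for-comprehension.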
| LGLO/freestyle-login | src/main/scala/io/scalac/frees/login/obsolete/Database.scala | Scala | apache-2.0 | 922 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.mandar2812.dynaml.models
import breeze.linalg.{DenseMatrix, norm, DenseVector}
import com.tinkerpop.blueprints.Graph
import com.tinkerpop.frames.FramedGraph
import org.apache.log4j.{Logger, Priority}
import io.github.mandar2812.dynaml.graphutils.{Parameter, CausalEdge, Point}
import io.github.mandar2812.dynaml.kernels.{RBFKernel, SVMKernel, GaussianDensityKernel}
import io.github.mandar2812.dynaml.optimization.ConjugateGradient
import io.github.mandar2812.dynaml.prototype.{QuadraticRenyiEntropy, GreedyEntropySelector}
import io.github.mandar2812.dynaml.utils
import scala.collection.JavaConversions
import scala.collection.mutable
import scala.util.Random
/**
* Abstract class implementing kernel feature map
* extraction functions.
*/
abstract class KernelBayesianModel(implicit override protected val task: String) extends
KernelizedModel[FramedGraph[Graph], Iterable[CausalEdge],
DenseVector[Double], DenseVector[Double], Double, Int, Int](task) {
protected val logger = Logger.getLogger(this.getClass)
override protected val optimizer: ConjugateGradient
def setRegParam(reg: Double): this.type = {
this.optimizer.setRegParam(reg)
this
}
def getRegParam: Double
override protected var hyper_parameters: List[String] = List("RegParam")
override protected var current_state: Map[String, Double] = Map("RegParam" -> 1.0)
protected val featuredims: Int
protected val vertexMaps: (mutable.HashMap[String, AnyRef],
mutable.HashMap[Long, AnyRef],
mutable.HashMap[Long, AnyRef])
protected val edgeMaps: (mutable.HashMap[Long, AnyRef],
mutable.HashMap[Long, AnyRef])
override def learn(): Unit = {
this.params = optimizer.optimize(nPoints, this.getXYEdges(), this.params)
}
override def getXYEdges() =
JavaConversions.iterableAsScalaIterable(
this.g.getEdges("relation", "causal", classOf[CausalEdge])
)
/**
* Get a subset of the data set defined
* as a filter operation on the raw data set.
*
* @param fn A function which takes a data point
* and returns a boolean value.
* @return The list containing all the data points
* satisfying the filtering criterion.
* */
def filter(fn : (Long) => Boolean): List[DenseVector[Double]] =
(1L to nPoints).view.filter(fn).map{
i => {
val point: Point = this.g.getVertex(vertexMaps._2(i),
classOf[Point])
DenseVector(point.getValue())(0 to -2)
}
}.toList
def filterLabels(fn: (Long) => Boolean): List[Double] = this.getXYEdges()
.map(_.getLabel().getValue()).toList
override def optimumSubset(M: Int): Unit = {
points = (0L to this.npoints - 1).toList
if(M < this.npoints) {
logger.info("Calculating sample variance of the data set")
//Get the original features of the data
//Calculate the column means and variances
val (mean, variance) = utils.getStats(this.filter((_) => true))
//Use the adjusted value of the variance
val adjvarance:DenseVector[Double] = variance :/= (npoints.toDouble - 1)
val density = new GaussianDensityKernel
logger.info("Using Silvermans rule of thumb to set bandwidth of density kernel")
logger.info("Std Deviation of the data: "+adjvarance.toString())
logger.info("norm: "+norm(adjvarance))
density.setBandwidth(DenseVector.tabulate[Double](featuredims - 1){
i => 1.06*math.sqrt(adjvarance(i))/math.pow(npoints, 0.2)
})
logger.info("Building low rank approximation to kernel matrix")
points = GreedyEntropySelector.subsetSelection(this,
M,
new QuadraticRenyiEntropy(density),
0.000001,
100)
}
}
/**
* Apply the feature map calculated by
   * using the Kernel to the data points
* and store the mapped features in the respective
* data nodes.
* */
override def applyFeatureMap(): Unit = {
logger.info("Applying Feature map to data set")
val edges = this.getXYEdges()
val pnode:Parameter = this.g.getVertex(this.vertexMaps._1("w"),
classOf[Parameter])
pnode.setSlope(this.params.toArray)
edges.foreach((edge) => {
//Get the predictor vertex corresponding to the edge
val vertex: Point = edge.getPoint()
//Get the original features of the point
val featurex = DenseVector(vertex.getValue())
//Get mapped features for the point
val mappedf = featureMap(featurex(0 to -2))
val newFeatures = DenseVector.vertcat[Double](mappedf, DenseVector(1.0))
//Set a new property in the vertex corresponding to the mapped features
vertex.setFeatureMap(newFeatures.toArray)
})
logger.info("DONE: Applying Feature map to data set")
}
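  /**
   * Installs a low-rank approximation of the given kernel: selects M prototype points
   * (via optimumSubset when needed), eigendecomposes their kernel matrix, and uses the
   * resulting feature mapping before re-mapping every data point.
   */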
override def applyKernel(
kernel: SVMKernel[DenseMatrix[Double]],
M: Int = math.sqrt(npoints).toInt): Unit = {
if(M != this.points.length) {
this.optimumSubset(M)
}
val features_of_points = this.filter(p => this.points.contains(p))
val kernelMatrix =
kernel.buildKernelMatrix(features_of_points, M)
val decomposition = kernelMatrix.eigenDecomposition(M)
this.featureMap = kernel.featureMapping(decomposition)(features_of_points)
this.params = DenseVector.ones[Double](decomposition._1.length + 1)
this.applyFeatureMap()
}
/**
   * Override the effect of applying a kernel
* and return the model back to its default
* state i.e. the Identity Kernel
* */
override def clearParameters(): Unit = {
this.params = DenseVector.ones[Double](this.featuredims)
this.featureMap = identity
val it = this.getXYEdges()
it.foreach((outEdge) => {
val ynode = outEdge.getLabel()
val xnode = outEdge.getPoint()
xnode.setFeatureMap(xnode.getValue())
})
val paramNode: Parameter = this.g.getVertex(vertexMaps._1("w"),
classOf[Parameter])
paramNode.setSlope(this.params.toArray)
}
override def initParams() = DenseVector.ones[Double](this.points.length+1)
override def trainTest(test: List[Long]) = {
val training_data = (1L to this.npoints).filter(!test.contains(_))
.map((p) => {
val ed: CausalEdge = this.g.getEdge(this.edgeMaps._1(p),
classOf[CausalEdge])
ed
}).view.toIterable
val test_data = test.map((p) => {
val ed: CausalEdge = this.g.getEdge(this.edgeMaps._1(p),
classOf[CausalEdge])
ed
}).view.toIterable
(training_data, test_data)
}
override def crossvalidate(folds: Int = 10, reg: Double = 0.001): (Double, Double, Double) = {
//Create the folds as lists of integers
//which index the data points
this.optimizer.setRegParam(reg).setNumIterations(1)
.setStepSize(0.001).setMiniBatchFraction(1.0)
val shuffle = Random.shuffle((1L to this.npoints).toList)
val avg_metrics: DenseVector[Double] = (1 to folds).map{a =>
//For the ath fold
//partition the data
      //ceil((a-1)*npoints/folds) -- ceil(a*npoints/folds)
//as test and the rest as training
val test = shuffle.slice((a-1)*this.nPoints.toInt/folds, a*this.nPoints.toInt/folds)
val(training_data, test_data) = this.trainTest(test)
      val tempparams = this.optimizer.optimize((folds - 1) * this.npoints / folds,
training_data,
DenseVector.ones[Double](this.params.length))
val metrics = this.evaluateFold(tempparams)(test_data)(this.task)
val res: DenseVector[Double] = metrics.kpi() / folds.toDouble
res
}.reduce(_+_)
//run batch sgd on each fold
//and test
(avg_metrics(0),
avg_metrics(1),
avg_metrics(2))
}
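  /**
   * Grid search over the RBF bandwidth and the regularisation parameter: every
   * (sigma, gamma) pair is cross-validated and the best-scoring pair is used to
   * retrain the model.
   */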
def tuneRBFKernel(prot: Int = math.sqrt(this.nPoints.toDouble).toInt,
folds: Int,
task: String = this.task): Unit = {
//Generate a grid of sigma values
//val (samplemean, samplevariance) = utils.getStats(this.filter(_ => true))
logger.info("Calculating grid for gamma values")
//samplevariance :*= 1.0/(this.npoints.toDouble - 1)
//val sigma = norm(samplevariance, 2)
val sigmagrid = List.tabulate(30)((i) => (i+1).toDouble/10.0)
val gammagrid = List.tabulate(30)((i) => i.toDouble/10.0)
val grid = (for{s <- sigmagrid; g <- gammagrid} yield (s,g)).groupBy((c) => c._1).map((hyper) => {
this.applyKernel(new RBFKernel(hyper._1), prot)
hyper._2.map((sigmaAndGamma) => {
logger.info("sigma = "+sigmaAndGamma._1+" gamma = "+sigmaAndGamma._2)
val (a, b, c) = this.crossvalidate(folds, sigmaAndGamma._2)
(c, sigmaAndGamma)
})
}).flatten
logger.info("Grid: \\n"+grid.toList)
val maximum = grid.max
logger.log(Priority.INFO, "Best value: "+maximum)
this.applyKernel(new RBFKernel(maximum._2._1), prot)
this.setRegParam(maximum._2._2).setMaxIterations(10).setBatchFraction(1.0)
this.learn()
}
} | mandar2812/bayeslearn | src/main/scala/io/github/mandar2812/dynaml/models/KernelBayesianModel.scala | Scala | apache-2.0 | 9,664 |
package jk_5.nailed.event
/**
* No description given
*
* @author jk-5
*/
class NailedEvent {
private var canceled = false
@inline def isCancelable = false
@inline final def isCanceled = this.canceled
@inline final def setCanceled(cancel: Boolean) = this.canceled = cancel
}
| nailed/nailed-legacy | src/main/scala/jk_5/nailed/event/NailedEvent.scala | Scala | unlicense | 287 |
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
package bench
package util
import scala.concurrent.stm._
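/**
 * A simple stack on top of ScalaSTM: the head is a Ref to an immutable list, and every
 * operation runs in its own atomic block or joins an enclosing transaction via the
 * implicit MaybeTxn.
 */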
class StmStack[A](els: Iterable[A]) {
def this() =
this(Iterable.empty)
private[this] val head: Ref[TsList[A]] =
Ref(TsList.End)
atomic { implicit txn =>
els.foreach(push)
}
def push(a: A)(implicit mt: MaybeTxn): Unit = atomic { implicit txn =>
head.set(TsList.Cons(a, head.get))
}
def tryPop()(implicit mt: MaybeTxn): Option[A] = atomic { implicit txn =>
head.get match {
case TsList.End =>
None
case TsList.Cons(h, t) =>
head.set(t)
Some(h)
}
}
private[bench] def unsafeToList()(implicit mt: MaybeTxn): List[A] = atomic { implicit txn =>
head.get.toList
}
}
| durban/exp-reagents | bench/src/main/scala/dev/tauri/choam/bench/util/StmStack.scala | Scala | apache-2.0 | 1,412 |
package edu.rice.habanero.benchmarks.banking
import edu.rice.habanero.actors.{GparsActor, GparsActorState, GparsPool}
import edu.rice.habanero.benchmarks.banking.BankingConfig._
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
import scala.collection.mutable.ListBuffer
import scala.util.Random
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> ([email protected])
*/
object BankingGparsManualStashActorBenchmark {
def main(args: Array[String]) {
BenchmarkRunner.runBenchmark(args, new BankingGparsManualStashActorBenchmark)
}
private final class BankingGparsManualStashActorBenchmark extends Benchmark {
def initialize(args: Array[String]) {
BankingConfig.parseArgs(args)
}
def printArgInfo() {
BankingConfig.printArgs()
}
def runIteration() {
val master = new Teller(BankingConfig.A, BankingConfig.N)
master.start()
master.send(StartMessage.ONLY)
GparsActorState.awaitTermination()
}
def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
if (lastIteration) {
GparsPool.shutdown()
}
}
}
protected class Teller(numAccounts: Int, numBankings: Int) extends GparsActor[AnyRef] {
private val self = this
private val accounts = Array.tabulate[Account](numAccounts)((i) => {
new Account(i, BankingConfig.INITIAL_BALANCE)
})
private var numCompletedBankings = 0
private val randomGen = new Random(123456)
protected override def onPostStart() {
accounts.foreach(loopAccount => loopAccount.start())
}
override def process(theMsg: AnyRef) {
theMsg match {
case sm: BankingConfig.StartMessage =>
var m = 0
while (m < numBankings) {
generateWork()
m += 1
}
case sm: BankingConfig.ReplyMessage =>
numCompletedBankings += 1
if (numCompletedBankings == numBankings) {
accounts.foreach(loopAccount => loopAccount.send(StopMessage.ONLY))
exit()
}
case message =>
val ex = new IllegalArgumentException("Unsupported message: " + message)
ex.printStackTrace(System.err)
}
}
def generateWork(): Unit = {
// src is lower than dest id to ensure there is never a deadlock
val srcAccountId = randomGen.nextInt((accounts.length / 10) * 8)
var loopId = randomGen.nextInt(accounts.length - srcAccountId)
if (loopId == 0) {
loopId += 1
}
val destAccountId = srcAccountId + loopId
val srcAccount = accounts(srcAccountId)
val destAccount = accounts(destAccountId)
val amount = Math.abs(randomGen.nextDouble()) * 1000
val sender = self
val cm = new CreditMessage(sender, amount, destAccount)
srcAccount.send(cm)
}
}
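  /**
   * Account actor with a hand-rolled stash: after forwarding a credit as a DebitMessage to
   * the destination account it enters reply mode, buffers every other incoming message in
   * `stashedMessages`, and replays them one at a time once the matching ReplyMessage arrives.
   */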
protected class Account(id: Int, var balance: Double) extends GparsActor[AnyRef] {
private var inReplyMode = false
private var replyTeller: GparsActor[AnyRef] = null
private val stashedMessages = new ListBuffer[AnyRef]()
override def process(theMsg: AnyRef) {
if (inReplyMode) {
theMsg match {
case _: ReplyMessage =>
inReplyMode = false
replyTeller.send(ReplyMessage.ONLY)
if (!stashedMessages.isEmpty) {
val newMsg = stashedMessages.remove(0)
this.send(newMsg)
}
case message =>
stashedMessages.append(message)
}
} else {
// process the message
theMsg match {
case dm: DebitMessage =>
balance += dm.amount
val creditor = dm.sender.asInstanceOf[GparsActor[AnyRef]]
creditor.send(ReplyMessage.ONLY)
case cm: CreditMessage =>
balance -= cm.amount
replyTeller = cm.sender.asInstanceOf[GparsActor[AnyRef]]
val sender = this
val destAccount = cm.recipient.asInstanceOf[Account]
destAccount.send(new DebitMessage(sender, cm.amount))
inReplyMode = true
case _: StopMessage =>
exit()
case message =>
val ex = new IllegalArgumentException("Unsupported message: " + message)
ex.printStackTrace(System.err)
}
// recycle stashed messages
if (!inReplyMode && !stashedMessages.isEmpty) {
val newMsg = stashedMessages.remove(0)
this.send(newMsg)
}
}
}
}
}
| smarr/savina | src/main/scala/edu/rice/habanero/benchmarks/banking/BankingGparsManualStashActorBenchmark.scala | Scala | gpl-2.0 | 4,544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager
import java.io.IOException
import java.net.{InetAddress, InetSocketAddress}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
import org.apache.flink.configuration.{AkkaOptions, ConfigConstants, Configuration}
import org.apache.flink.runtime.akka.AkkaUtils
import org.apache.flink.util.NetUtils
import org.junit.Assert._
import org.junit.Test
import scala.concurrent.duration.Duration
/**
* Tests that a lookup of a local JobManager fails within a given timeout if the JobManager
* actor is not reachable.
*/
class JobManagerConnectionTest {
private val timeout = 1000
@Test
def testResolveUnreachableActorLocalHost() : Unit = {
// startup a test actor system listening at an arbitrary address
val actorSystem = AkkaUtils.createActorSystem(new Configuration(), Some(("", 0)))
try {
// get a port that we know is unoccupied
val freePort = try {
NetUtils.getAvailablePort()
} catch {
// abort the test if we cannot find a free port
case _ : Throwable => return
}
val endpoint = NetUtils.unresolvedHostAndPortToNormalizedString("127.0.0.1", freePort)
val config = createConfigWithLowTimeout()
mustReturnWithinTimeout(Duration(5*timeout, TimeUnit.MILLISECONDS)) {
() => {
try {
AkkaUtils.getActorRef(
endpoint,
actorSystem,
AkkaUtils.getLookupTimeout(config))
fail("Should fail since the JobManager is not reachable")
}
catch {
case e: IOException => // good
}
}
}
}
catch {
case e: Exception =>
e.printStackTrace()
fail(e.getMessage)
}
finally {
actorSystem.shutdown()
}
}
/**
   * Tests that a lookup of a remote JobManager fails within a given timeout if the JobManager
* actor is not reachable.
*/
@Test
def testResolveUnreachableActorRemoteHost() : Unit = {
// startup a test actor system listening at an arbitrary address
val actorSystem = AkkaUtils.createActorSystem(new Configuration(), Some(("", 0)))
try {
// some address that is not running a JobManager
val endpoint = NetUtils.unresolvedHostAndPortToNormalizedString("10.254.254.254", 2)
val config = createConfigWithLowTimeout()
mustReturnWithinTimeout(Duration(5*timeout, TimeUnit.MILLISECONDS)) {
() => {
try {
AkkaUtils.getActorRef(
endpoint,
actorSystem,
AkkaUtils.getLookupTimeout(config))
fail("Should fail since the JobManager is not reachable")
}
catch {
case e: IOException => // good
}
}
}
}
catch {
case e: Exception =>
e.printStackTrace()
fail(e.getMessage)
}
finally {
actorSystem.shutdown()
}
}
private def createConfigWithLowTimeout() : Configuration = {
val config = new Configuration()
config.setString(AkkaOptions.LOOKUP_TIMEOUT,
Duration(timeout, TimeUnit.MILLISECONDS).toSeconds + " s")
config
}
private def mustReturnWithinTimeout(timeout: Duration)(task: () => Unit) : Unit = {
val done = new AtomicBoolean()
val error = new AtomicReference[Throwable]()
val runnable = new Runnable {
override def run(): Unit = {
try {
task()
done.set(true)
}
catch {
case t: Throwable => error.set(t)
}
done.synchronized {
done.notifyAll()
}
}
}
val runner = new Thread(runnable, "Test runner")
runner.setDaemon(true)
var now = System.currentTimeMillis()
val deadline = now + timeout.toMillis
runner.start()
done.synchronized {
while (error.get() == null && !done.get() && now < deadline) {
done.wait(deadline - now)
now = System.currentTimeMillis()
}
}
if (error.get() != null) {
error.get().printStackTrace()
fail("Exception in the timed call: " + error.get().getMessage())
}
// check if we finished because we were done
// otherwise it is a timeout
if (!done.get()) {
runner.interrupt()
fail("Call did not finish within " + timeout)
}
}
}
| WangTaoTheTonic/flink | flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerConnectionTest.scala | Scala | apache-2.0 | 5,210 |
package akka.persistence
import akka.actor._
import scalaz._
import Scalaz._
import streamz.akka.persistence.Event
class EventReader(pid: String, from: Long) extends Actor {
import EventReader._
import BufferedView._
val view = context.actorOf(Props(new BufferedView(pid, BufferedViewSettings(fromSequenceNr = from), self)))
  var callback: Option[Throwable \/ Event[Any] => Unit] = None
def receive = {
case Response(ps) => for {
p <- ps.headOption
cb <- callback
} {
cb(p.right)
callback = None
}
case Read(cb) =>
callback = Some(cb)
view ! Request(1)
}
}
object EventReader {
  case class Read(callback: Throwable \/ Event[Any] => Unit)
}
| Astrac/streamz | streamz-akka-persistence/src/main/scala/akka/persistence/EventReader.scala | Scala | apache-2.0 | 711 |
package co.technius
import scala.language.experimental.macros
package object scalua {
implicit object IntConverter extends LuaConverter[Int] {
override def toJava(value: LuaValue): Option[Int] = value match {
case LuaInt(i) => Some(i)
case _ => None
}
override def toLua(value: Int): LuaValue = LuaInt(value)
}
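  // Example behavior of the converter above (illustrative):
  //   IntConverter.toLua(42)            // LuaInt(42)
  //   IntConverter.toJava(LuaInt(42))   // Some(42)
  //   IntConverter.toJava(LuaString("42")) // None, only LuaInt values convert back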
implicit object FloatConverter extends LuaConverter[Float] {
override def toJava(value: LuaValue): Option[Float] = value match {
case LuaInt(i) => Some(i.toFloat)
case LuaDouble(d) => Some(d.toFloat)
case _ => None
}
    override def toLua(value: Float): LuaValue = LuaDouble(value.toDouble)
}
implicit object DoubleConverter extends LuaConverter[Double] {
override def toJava(value: LuaValue): Option[Double] = value match {
case LuaInt(i) => Some(i.toDouble)
case LuaDouble(d) => Some(d)
case _ => None
}
override def toLua(value: Double): LuaValue = LuaDouble(value)
}
implicit object StringConverter extends LuaConverter[String] {
override def toJava(value: LuaValue): Option[String] = value match {
case LuaString(s) => Some(s)
case _ => None
}
override def toLua(value: String): LuaValue = LuaString(value)
}
implicit object BooleanConverter extends LuaConverter[Boolean] {
override def toJava(value: LuaValue): Option[Boolean] = value match {
case LuaBoolean(b) => Some(b)
case _ => None
}
override def toLua(value: Boolean): LuaValue = LuaBoolean(value)
}
implicit def luaValueConverter[T <: LuaValue]: LuaConverter[T] =
macro MacroImpl.luaValueConverterImpl[T]
}
| Technius/scalua | src/main/scala/co/technius/scalua/package.scala | Scala | mit | 1,652 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.consumers
import minitest.TestSuite
import monix.execution.Callback
import monix.eval.Task
import monix.execution.Ack.Continue
import monix.execution.Cancelable
import monix.execution.atomic.Atomic
import monix.execution.exceptions.DummyException
import monix.execution.schedulers.TestScheduler
import monix.reactive.{Consumer, Observable}
import scala.concurrent.Promise
import scala.util.{Failure, Success}
object ForeachParallelAsyncConsumerSuite extends TestSuite[TestScheduler] {
def setup(): TestScheduler = TestScheduler()
def tearDown(s: TestScheduler): Unit = {
assert(s.state.tasks.isEmpty, "TestScheduler should have no pending tasks")
}
test("should sum a long stream") { implicit s =>
val count = 10000L
val obs = Observable.range(0, count)
val sum = Atomic(0L)
val f = obs
.consumeWith(
Consumer
.foreachParallelTask(10)(x => Task.evalAsync(sum.add(x))))
.runToFuture
s.tick()
assertEquals(f.value, Some(Success(())))
assertEquals(sum.get(), count * (count - 1) / 2)
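    // Worked value: Observable.range(0, 10000) emits 0 until 10000, whose sum is
    // 10000 * 9999 / 2 = 49995000, the value the Atomic accumulator must hold.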
}
test("should interrupt with error") { implicit s =>
val ex = DummyException("dummy")
val obs = Observable.range(0, 10000).endWithError(ex)
val sum = Atomic(0L)
val f = obs
.consumeWith(
Consumer
.foreachParallelTask(10)(x => Task.evalAsync(sum.add(x))))
.runToFuture
s.tick()
assertEquals(f.value, Some(Failure(ex)))
}
test("should protect against user error") { implicit s =>
val ex = DummyException("dummy")
var mainWasCanceled = false
val consumer = Consumer.foreachParallelTask[Int](10)(x => throw ex)
val onFinish = Promise[Unit]()
val (out, c) = consumer.createSubscriber(Callback.fromPromise(onFinish), s)
c := Cancelable { () =>
mainWasCanceled = true
}
s.tick()
assertEquals(out.onNext(1), Continue)
s.tick()
assert(mainWasCanceled, "mainWasCanceled")
assertEquals(onFinish.future.value, Some(Failure(ex)))
}
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/consumers/ForeachParallelAsyncConsumerSuite.scala | Scala | apache-2.0 | 2,700 |
package breeze.util
import javax.management.MBeanServer;
import java.lang.management.ManagementFactory;
import java.io.File
/**
*
 * Dumps a snapshot of the JVM heap to a file.
*
* http://blogs.sun.com/sundararajan/entry/programmatically_dumping_heap_from_java
* @author dlwh
*/
object HeapDump {
// This is the name of the HotSpot Diagnostic MBean
private val HOTSPOT_BEAN_NAME = "com.sun.management:type=HotSpotDiagnostic";
  // field to store the hotspot diagnostic MBean; created lazily via the standard
  // platform MXBean proxy (a sketch following the blog post linked above)
  private lazy val hotspotMBean: com.sun.management.HotSpotDiagnosticMXBean =
    ManagementFactory.newPlatformMXBeanProxy(
      ManagementFactory.getPlatformMBeanServer,
      HOTSPOT_BEAN_NAME,
      classOf[com.sun.management.HotSpotDiagnosticMXBean])
/**
* Call this method from your application whenever you
* want to dump the heap snapshot into a file.
*
* @param fileName name of the heap dump file
* @param live flag that tells whether to dump
* only the live objects
*/
  def dumpHeap(fileName: String, live: Boolean = false) {
    // Delegates to the HotSpot diagnostic MXBean. The target file must not
    // already exist, otherwise the JVM refuses to write the dump.
    hotspotMBean.dumpHeap(fileName, live)
  }
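  // Example (illustrative): dump only the live objects to a fresh file.
  //   HeapDump.dumpHeap("/tmp/heap.hprof", live = true)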
}
| tjhunter/scalanlp-core | process/src/main/scala/breeze/util/HeapDump.scala | Scala | apache-2.0 | 802 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript
import slamdata.Predef._
import quasar._, RenderTree.ops._
import quasar.contrib.matryoshka._
import quasar.ejson._
import quasar.ejson.implicits._
import quasar.fp._
import quasar.fp.ski._
import quasar.std.TemporalPart
import matryoshka._
import matryoshka.data._
import matryoshka.implicits._
import matryoshka.patterns._
import monocle.macros.Lenses
import scalaz._, Scalaz._
sealed abstract class MapFuncCore[T[_[_]], A]
sealed abstract class Nullary[T[_[_]], A] extends MapFuncCore[T, A]
sealed abstract class Unary[T[_[_]], A] extends MapFuncCore[T, A] {
def a1: A
}
sealed abstract class Binary[T[_[_]], A] extends MapFuncCore[T, A] {
def a1: A
def a2: A
}
sealed abstract class Ternary[T[_[_]], A] extends MapFuncCore[T, A] {
def a1: A
def a2: A
def a3: A
}
// TODO all `Free` should be generalized to `T` once we can handle recursive `Free`
object MapFuncCore {
import MapFuncsCore._
val EC = Inject[Common, EJson]
val EX = Inject[Extension, EJson]
type CoMapFuncR[T[_[_]], A] = CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]]
def rollMF[T[_[_]], A](mf: MapFunc[T, FreeMapA[T, A]])
: CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]] =
CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]](mf.right[A])
/** Returns a List that maps element-by-element to a MapFunc array. If we
* can’t statically determine _all_ of the elements, it doesn’t match.
*/
object StaticArray {
def apply[T[_[_]]: CorecursiveT, A](elems: List[FreeMapA[T, A]]): FreeMapA[T, A] =
elems.map(e => Free.roll(MFC(MakeArray[T, FreeMapA[T, A]](e)))) match {
case Nil => Free.roll(MFC(EmptyArray[T, FreeMapA[T, A]]))
case h :: t => t.foldLeft(h)((a, e) => Free.roll(MFC(ConcatArrays(a, e))))
}
def unapply[T[_[_]]: BirecursiveT, A](mf: CoMapFuncR[T, A]):
Option[List[FreeMapA[T, A]]] =
mf match {
case ConcatArraysN(as) =>
as.foldRightM[Option, List[FreeMapA[T, A]]](
Nil)(
(mf, acc) => (mf.project.run.toOption collect {
case MFC(MakeArray(value)) => (value :: acc)
case MFC(Constant(Embed(EC(ejson.Arr(values))))) =>
values.map(v => rollMF[T, A](MFC(Constant(v))).embed) ++ acc
}))
case _ => None
}
}
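  // Sketch of the round trip above: StaticArray(List(a, b)) builds
  //   ConcatArrays(MakeArray(a), MakeArray(b))
  // as a FreeMapA, and StaticArray.unapply recognizes that shape (or a
  // Constant(Arr(...))) and yields Some(List(a, b)); if any concatenated piece is
  // not statically known to be an array, the whole match fails with None.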
/** Like `StaticArray`, but returns as much of the array as can be statically
* determined. Useful if you just want to statically lookup into an array if
* possible, and punt otherwise.
*/
object StaticArrayPrefix {
def unapply[T[_[_]]: BirecursiveT, A](mf: CoMapFuncR[T, A]):
Option[List[FreeMapA[T, A]]] =
mf match {
case ConcatArraysN(as) =>
          as.foldLeftM[List[FreeMapA[T, A]] \/ ?, List[FreeMapA[T, A]]](
Nil)(
(acc, mf) => mf.project.run.fold(
κ(acc.left),
_ match {
case MFC(MakeArray(value)) => (acc :+ value).right
case MFC(Constant(Embed(EC(ejson.Arr(values))))) =>
(acc ++ values.map(v => rollMF[T, A](MFC(Constant(v))).embed)).right
case _ => acc.left
})).merge.some
case _ => None
}
}
object StaticMap {
def apply[T[_[_]]: CorecursiveT, A](elems: List[(T[EJson], FreeMapA[T, A])]): FreeMapA[T, A] =
elems.map(e => Free.roll(MFC(MakeMap[T, FreeMapA[T, A]](Free.roll(MFC(Constant(e._1))), e._2)))) match {
case Nil => Free.roll(MFC(EmptyMap[T, FreeMapA[T, A]]))
case h :: t => t.foldLeft(h)((a, e) => Free.roll(MFC(ConcatMaps(a, e))))
}
def unapply[T[_[_]]: BirecursiveT, A](mf: CoMapFuncR[T, A]):
Option[List[(T[EJson], FreeMapA[T, A])]] =
mf match {
case ConcatMapsN(as) =>
as.foldRightM[Option, List[(T[EJson], FreeMapA[T, A])]](
Nil)(
(mf, acc) => (mf.project.run.toOption >>=
{
case MFC(MakeMap(ExtractFunc(Constant(k)), v)) => ((k, v) :: acc).some
case MFC(Constant(Embed(EX(ejson.Map(kvs))))) =>
(kvs.map(_.map(v => rollMF[T, A](MFC(Constant(v))).embed)) ++ acc).some
case _ => None
}))
case _ => None
}
}
object EmptyArray {
def apply[T[_[_]]: CorecursiveT, A]: MapFuncCore[T, A] =
Constant[T, A](EJson.fromCommon(ejson.Arr[T[EJson]](Nil)))
}
object EmptyMap {
def apply[T[_[_]]: CorecursiveT, A]: MapFuncCore[T, A] =
Constant[T, A](EJson.fromExt(ejson.Map[T[EJson]](Nil)))
}
// TODO: subtyping is preventing embedding of MapFuncsCore
/** This returns the set of expressions that are concatenated together. It can
* include statically known pieces, like `MakeArray` and `Constant(Arr)`, but
* also arbitrary expressions that may evaluate to an array of any size.
*/
object ConcatArraysN {
def apply[T[_[_]]: BirecursiveT, A](args: List[FreeMapA[T, A]])
: CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]] = {
args.toList match {
case h :: t => t.foldLeft(h)((a, b) => rollMF[T, A](MFC(ConcatArrays(a, b))).embed).project
case Nil => rollMF[T, A](MFC(EmptyArray[T, FreeMapA[T, A]]))
}
}
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def unapply[T[_[_]]: BirecursiveT, A](mf: CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]]):
Option[List[FreeMapA[T, A]]] =
mf.run.fold(
κ(None),
{
case MFC(MakeArray(_)) | MFC(Constant(Embed(EC(ejson.Arr(_))))) =>
List(mf.embed).some
case MFC(ConcatArrays(h, t)) =>
(unapply(h.project).getOrElse(List(h)) ++
unapply(t.project).getOrElse(List(t))).some
case _ => None
})
}
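  // Sketch: ConcatArraysN.unapply flattens nested concatenations into the list of
  // concatenated pieces, e.g. ConcatArrays(ConcatArrays(x, MakeArray(y)), z)
  // unapplies to Some(List(x, MakeArray(y), z)). The pieces are the array-valued
  // expressions themselves (MakeArray nodes included), not their elements;
  // unwrapping the elements is what StaticArray adds on top of this.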
// TODO subtyping is preventing embedding of MapFuncsCore
object ConcatMapsN {
def apply[T[_[_]]: BirecursiveT, A](args: List[FreeMapA[T, A]])
: CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]] =
args.toList match {
case h :: t => t.foldLeft(h)((a, b) => rollMF[T, A](MFC(ConcatMaps(a, b))).embed).project
case Nil => rollMF[T, A](MFC(EmptyMap[T, FreeMapA[T, A]]))
}
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def unapply[T[_[_]]: BirecursiveT, A](mf: CoEnv[A, MapFunc[T, ?], FreeMapA[T, A]]):
Option[List[FreeMapA[T, A]]] =
mf.run.fold(
κ(None),
{
case MFC(MakeMap(_, _)) | MFC(Constant(Embed(EX(ejson.Map(_))))) =>
List(mf.embed).some
case MFC(ConcatMaps(h, t)) =>
(unapply(h.project).getOrElse(List(h)) ++
unapply(t.project).getOrElse(List(t))).some
case _ => None
})
}
// Transform effectively constant `MapFunc` into a `Constant` value.
// This is a mini-evaluator for constant qscript values.
def foldConstant[T[_[_]]: BirecursiveT, A]
: CoMapFuncR[T, A] => Option[T[EJson]] = {
object ConstEC {
def unapply[B](tco: FreeMapA[T, B]): Option[ejson.Common[T[EJson]]] = {
tco.project.run match {
          case \/-(MFC(Constant(Embed(EC(v))))) => Some(v)
case _ => None
}
}
}
_.run.fold[Option[ejson.EJson[T[ejson.EJson]]]](
κ(None),
{
// relations
case MFC(And(ConstEC(ejson.Bool(v1)), ConstEC(ejson.Bool(v2)))) =>
EC.inj(ejson.Bool(v1 && v2)).some
case MFC(Or(ConstEC(ejson.Bool(v1)), ConstEC(ejson.Bool(v2)))) =>
EC.inj(ejson.Bool(v1 || v2)).some
case MFC(Not(ConstEC(ejson.Bool(v1)))) =>
EC.inj(ejson.Bool(!v1)).some
// string
case MFC(Lower(ConstEC(ejson.Str(v1)))) =>
EC.inj(ejson.Str(v1.toLowerCase)).some
case MFC(Upper(ConstEC(ejson.Str(v1)))) =>
EC.inj(ejson.Str(v1.toUpperCase)).some
// structural
case MFC(MakeArray(ExtractFunc(Constant(v1)))) =>
EC.inj(ejson.Arr(List(v1))).some
case MFC(MakeMap(ConstEC(ejson.Str(v1)), ExtractFunc(Constant(v2)))) =>
EX.inj(ejson.Map(List(EC.inj(ejson.Str[T[ejson.EJson]](v1)).embed -> v2))).some
case MFC(ConcatArrays(ConstEC(ejson.Arr(v1)), ConstEC(ejson.Arr(v2)))) =>
EC.inj(ejson.Arr(v1 ++ v2)).some
case _ => None
}) ∘ (_.embed)
}
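  // Sketch of the mini-evaluator above: an expression whose arguments are all
  // literals, e.g. And(Constant(Bool(true)), Constant(Bool(false))), folds to
  // Some of the EJson constant false, while any expression with a non-constant
  // argument (or an operation not handled above) folds to None and is left
  // unchanged by callers such as `normalize` below.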
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def flattenAnd[T[_[_]], A](fm: FreeMapA[T, A]): NonEmptyList[FreeMapA[T, A]] =
fm.resume match {
      case -\/(MFC(And(a, b))) => flattenAnd(a) append flattenAnd(b)
case _ => NonEmptyList(fm)
}
// NB: This _could_ be combined with `rewrite`, but it causes rewriting to
// take way too long, so instead we apply it separately afterward.
/** Pulls conditional `Undefined`s as far up an expression as possible. */
def extractGuards[T[_[_]]: BirecursiveT: EqualT, A: Equal]
: CoMapFuncR[T, A] => Option[CoMapFuncR[T, A]] =
_.run.toOption >>= (MFC.unapply) >>= {
// NB: The last case pulls guards into a wider scope, and we want to avoid
// doing that for certain MapFuncs, so we add explicit `none`s.
case Guard(_, _, _, _)
| IfUndefined(_, _)
| MakeArray(_)
| MakeMap(_, _)
| Or(_, _) => none
// TODO: This should be able to extract a guard where _either_ side is
// `Undefined`, and should also extract `Cond` with `Undefined` on a
// branch.
case func =>
val writer =
func.traverse[Writer[List[(FreeMapA[T, A], Type)], ?], FreeMapA[T, A]] {
            case Embed(CoEnv(\/-(MFC(Guard(e, t, s, ExtractFunc(Undefined())))))) =>
Writer(List((e, t)), s)
case arg => Writer(Nil, arg)
}
writer.written match {
case Nil => none
case guards =>
rollMF[T, A](guards.distinctE.foldRight(MFC(writer.value)) {
case ((e, t), s) =>
MFC(Guard(e, t, Free.roll(s), Free.roll(MFC(Undefined[T, FreeMapA[T, A]]()))))
}).some
}
}
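  // Sketch of the hoisting above: an expression such as
  //   Add(Guard(e, tpe, s, Undefined), t)
  // is rewritten to
  //   Guard(e, tpe, Add(s, t), Undefined)
  // so the type check guards the whole addition. Guard, IfUndefined, MakeArray,
  // MakeMap and Or are deliberately skipped, since pulling an Undefined out of
  // them would change their meaning.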
/** Converts conditional `Undefined`s into conditions that can be used in a
* `Filter`.
*
* Returns the extracted predicate, the defined expression extracted from the
* original condition and a function to extract the defined branch from other
* expressions containing the same conditional test as the original.
*/
def extractFilter[T[_[_]]: BirecursiveT: EqualT, A: Equal](mf: FreeMapA[T, A])(test: A => Option[Hole])
: Option[(FreeMap[T], FreeMapA[T, A], FreeMapA[T, A] => Option[FreeMapA[T, A]])] =
mf.resume.swap.toOption >>= {
case MFC(Cond(c, e, ExtractFunc(Undefined()))) =>
c.traverse(test) ∘ ((_, e, {
          case Embed(CoEnv(\/-(MFC(Cond(c1, e1, ExtractFunc(Undefined())))))) =>
(c1 ≟ c) option e1
case _ => none
}))
case MFC(Cond(c, ExtractFunc(Undefined()), f)) =>
c.traverse(test) ∘ (h => (Free.roll(MFC(Not[T, FreeMap[T]](h))), f, {
          case Embed(CoEnv(\/-(MFC(Cond(c1, ExtractFunc(Undefined()), f1))))) =>
(c1 ≟ c) option f1
case _ => none
}))
case MFC(Guard(c, t, e, ExtractFunc(Undefined()))) =>
c.traverse(test) ∘ (h => (
Free.roll(MFC(Guard(h, t, BoolLit[T, Hole](true), BoolLit[T, Hole](false)))),
e,
{
            case Embed(CoEnv(\/-(MFC(Guard(c1, t1, e1, ExtractFunc(Undefined())))))) =>
(c1 ≟ c && t1 ≟ t) option e1
case _ => none
}
))
case MFC(Guard(c, t, ExtractFunc(Undefined()), f)) =>
c.traverse(test) ∘ (h => (
Free.roll(MFC(Guard(h, t, BoolLit[T, Hole](false), BoolLit[T, Hole](true)))),
f,
{
            case Embed(CoEnv(\/-(MFC(Guard(c1, t1, ExtractFunc(Undefined()), f1))))) =>
(c1 ≟ c && t1 ≟ t) option f1
case _ => none
}
))
case _ => none
}
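  // Sketch: for mf = Cond(c, e, Undefined), where every hole of `c` satisfies
  // `test`, this returns (c lifted to a FreeMap over Hole, e, reuse), and
  // reuse(Cond(c, e2, Undefined)) is Some(e2) for any other expression guarded by
  // the same condition. Callers can use the first component as a Filter predicate
  // and keep only the already-defined branches.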
def normalize[T[_[_]]: BirecursiveT: EqualT, A: Equal]
: CoMapFuncR[T, A] => CoMapFuncR[T, A] =
repeatedly(applyTransforms(
foldConstant[T, A].apply(_) ∘ (const => rollMF[T, A](MFC(Constant(const)))),
rewrite[T, A]))
def replaceJoinSides[T[_[_]]: BirecursiveT](left: Symbol, right: Symbol)
: CoMapFuncR[T, JoinSide] => CoMapFuncR[T, JoinSide] =
_.run match {
      case \/-(MFC(JoinSideName(`left`))) => CoEnv(-\/(LeftSide))
      case \/-(MFC(JoinSideName(`right`))) => CoEnv(-\/(RightSide))
case x => CoEnv(x)
}
// TODO: This could be split up as it is in LP, with each function containing
// its own normalization.
private def rewrite[T[_[_]]: BirecursiveT: EqualT, A: Equal]:
CoMapFuncR[T, A] => Option[CoMapFuncR[T, A]] =
_.run.toOption >>= (MFC.unapply _) >>= {
case Eq(v1, v2) if v1 ≟ v2 =>
rollMF[T, A](
MFC(Constant(EJson.fromCommon(ejson.Bool[T[EJson]](true))))).some
case Eq(ExtractFunc(Constant(v1)), ExtractFunc(Constant(v2))) =>
rollMF[T, A](
MFC(Constant(EJson.fromCommon(ejson.Bool[T[EJson]](v1 ≟ v2))))).some
case DeleteField(
Embed(StaticMap(map)),
ExtractFunc(Constant(field))) =>
StaticMap(map.filter(_._1 ≠ field)).project.some
// TODO: Generalize this to `StaticMapSuffix`.
case DeleteField(
        Embed(CoEnv(\/-(MFC(ConcatMaps(m, Embed(CoEnv(\/-(MFC(MakeMap(k, _)))))))))),
f)
if k ≟ f =>
rollMF[T, A](MFC(DeleteField(m, f))).some
case ProjectIndex(
Embed(StaticArrayPrefix(as)),
ExtractFunc(Constant(Embed(EX(ejson.Int(index))))))
if index.isValidInt =>
as.lift(index.intValue).map(_.project)
case ProjectField(
Embed(StaticMap(map)),
ExtractFunc(Constant(field))) =>
map.reverse.find(_._1 ≟ field) ∘ (_._2.project)
// TODO: Generalize these to `StaticMapSuffix`
      case ProjectField(Embed(CoEnv(\/-(MFC(MakeMap(k, Embed(v)))))), f) if k ≟ f =>
v.some
case ProjectField(
        Embed(CoEnv(\/-(MFC(ConcatMaps(_, Embed(CoEnv(\/-(MFC(MakeMap(k, Embed(v))))))))))),
f)
if k ≟ f =>
v.some
case ConcatArrays(Embed(StaticArray(Nil)), Embed(rhs)) => rhs.some
case ConcatArrays(Embed(lhs), Embed(StaticArray(Nil))) => lhs.some
case ConcatMaps(Embed(StaticMap(Nil)), Embed(rhs)) => rhs.some
case ConcatMaps(Embed(lhs), Embed(StaticMap(Nil))) => lhs.some
case _ => none
}
implicit def traverse[T[_[_]]]: Traverse[MapFuncCore[T, ?]] =
new Traverse[MapFuncCore[T, ?]] {
def traverseImpl[G[_], A, B](
fa: MapFuncCore[T, A])(
f: A => G[B])(
implicit G: Applicative[G]):
G[MapFuncCore[T, B]] = fa match {
// nullary
case Constant(v) => G.point(Constant[T, B](v))
case Undefined() => G.point(Undefined[T, B]())
case JoinSideName(n) => G.point(JoinSideName[T, B](n))
case Now() => G.point(Now[T, B]())
// unary
case ExtractCentury(a1) => f(a1) ∘ (ExtractCentury(_))
case ExtractDayOfMonth(a1) => f(a1) ∘ (ExtractDayOfMonth(_))
case ExtractDecade(a1) => f(a1) ∘ (ExtractDecade(_))
case ExtractDayOfWeek(a1) => f(a1) ∘ (ExtractDayOfWeek(_))
case ExtractDayOfYear(a1) => f(a1) ∘ (ExtractDayOfYear(_))
case ExtractEpoch(a1) => f(a1) ∘ (ExtractEpoch(_))
case ExtractHour(a1) => f(a1) ∘ (ExtractHour(_))
case ExtractIsoDayOfWeek(a1) => f(a1) ∘ (ExtractIsoDayOfWeek(_))
case ExtractIsoYear(a1) => f(a1) ∘ (ExtractIsoYear(_))
case ExtractMicroseconds(a1) => f(a1) ∘ (ExtractMicroseconds(_))
case ExtractMillennium(a1) => f(a1) ∘ (ExtractMillennium(_))
case ExtractMilliseconds(a1) => f(a1) ∘ (ExtractMilliseconds(_))
case ExtractMinute(a1) => f(a1) ∘ (ExtractMinute(_))
case ExtractMonth(a1) => f(a1) ∘ (ExtractMonth(_))
case ExtractQuarter(a1) => f(a1) ∘ (ExtractQuarter(_))
case ExtractSecond(a1) => f(a1) ∘ (ExtractSecond(_))
case ExtractTimezone(a1) => f(a1) ∘ (ExtractTimezone(_))
case ExtractTimezoneHour(a1) => f(a1) ∘ (ExtractTimezoneHour(_))
case ExtractTimezoneMinute(a1) => f(a1) ∘ (ExtractTimezoneMinute(_))
case ExtractWeek(a1) => f(a1) ∘ (ExtractWeek(_))
case ExtractYear(a1) => f(a1) ∘ (ExtractYear(_))
case Date(a1) => f(a1) ∘ (Date(_))
case Time(a1) => f(a1) ∘ (Time(_))
case Timestamp(a1) => f(a1) ∘ (Timestamp(_))
case Interval(a1) => f(a1) ∘ (Interval(_))
case StartOfDay(a1) => f(a1) ∘ (StartOfDay(_))
case TemporalTrunc(a1, a2) => f(a2) ∘ (TemporalTrunc(a1, _))
case TimeOfDay(a1) => f(a1) ∘ (TimeOfDay(_))
case ToTimestamp(a1) => f(a1) ∘ (ToTimestamp(_))
case TypeOf(a1) => f(a1) ∘ (TypeOf(_))
case Negate(a1) => f(a1) ∘ (Negate(_))
case Not(a1) => f(a1) ∘ (Not(_))
case Length(a1) => f(a1) ∘ (Length(_))
case Lower(a1) => f(a1) ∘ (Lower(_))
case Upper(a1) => f(a1) ∘ (Upper(_))
case Bool(a1) => f(a1) ∘ (Bool(_))
case Integer(a1) => f(a1) ∘ (Integer(_))
case Decimal(a1) => f(a1) ∘ (Decimal(_))
case Null(a1) => f(a1) ∘ (Null(_))
case ToString(a1) => f(a1) ∘ (ToString(_))
case MakeArray(a1) => f(a1) ∘ (MakeArray(_))
case Meta(a1) => f(a1) ∘ (Meta(_))
// binary
case Add(a1, a2) => (f(a1) ⊛ f(a2))(Add(_, _))
case Multiply(a1, a2) => (f(a1) ⊛ f(a2))(Multiply(_, _))
case Subtract(a1, a2) => (f(a1) ⊛ f(a2))(Subtract(_, _))
case Divide(a1, a2) => (f(a1) ⊛ f(a2))(Divide(_, _))
case Modulo(a1, a2) => (f(a1) ⊛ f(a2))(Modulo(_, _))
case Power(a1, a2) => (f(a1) ⊛ f(a2))(Power(_, _))
case Eq(a1, a2) => (f(a1) ⊛ f(a2))(Eq(_, _))
case Neq(a1, a2) => (f(a1) ⊛ f(a2))(Neq(_, _))
case Lt(a1, a2) => (f(a1) ⊛ f(a2))(Lt(_, _))
case Lte(a1, a2) => (f(a1) ⊛ f(a2))(Lte(_, _))
case Gt(a1, a2) => (f(a1) ⊛ f(a2))(Gt(_, _))
case Gte(a1, a2) => (f(a1) ⊛ f(a2))(Gte(_, _))
case IfUndefined(a1, a2) => (f(a1) ⊛ f(a2))(IfUndefined(_, _))
case And(a1, a2) => (f(a1) ⊛ f(a2))(And(_, _))
case Or(a1, a2) => (f(a1) ⊛ f(a2))(Or(_, _))
case Within(a1, a2) => (f(a1) ⊛ f(a2))(Within(_, _))
case MakeMap(a1, a2) => (f(a1) ⊛ f(a2))(MakeMap(_, _))
case ConcatMaps(a1, a2) => (f(a1) ⊛ f(a2))(ConcatMaps(_, _))
case ProjectIndex(a1, a2) => (f(a1) ⊛ f(a2))(ProjectIndex(_, _))
case ProjectField(a1, a2) => (f(a1) ⊛ f(a2))(ProjectField(_, _))
case DeleteField(a1, a2) => (f(a1) ⊛ f(a2))(DeleteField(_, _))
case ConcatArrays(a1, a2) => (f(a1) ⊛ f(a2))(ConcatArrays(_, _))
case Range(a1, a2) => (f(a1) ⊛ f(a2))(Range(_, _))
case Split(a1, a2) => (f(a1) ⊛ f(a2))(Split(_, _))
// ternary
case Between(a1, a2, a3) => (f(a1) ⊛ f(a2) ⊛ f(a3))(Between(_, _, _))
case Cond(a1, a2, a3) => (f(a1) ⊛ f(a2) ⊛ f(a3))(Cond(_, _, _))
case Search(a1, a2, a3) => (f(a1) ⊛ f(a2) ⊛ f(a3))(Search(_, _, _))
case Substring(a1, a2, a3) => (f(a1) ⊛ f(a2) ⊛ f(a3))(Substring(_, _, _))
case Guard(a1, tpe, a2, a3) => (f(a1) ⊛ f(a2) ⊛ f(a3))(Guard(_, tpe, _, _))
}
}
implicit def equal[T[_[_]]: BirecursiveT: EqualT, A]: Delay[Equal, MapFuncCore[T, ?]] =
new Delay[Equal, MapFuncCore[T, ?]] {
@SuppressWarnings(Array("org.wartremover.warts.Equals"))
def apply[A](in: Equal[A]): Equal[MapFuncCore[T, A]] = Equal.equal {
// nullary
case (Constant(v1), Constant(v2)) =>
// FIXME: Ensure we’re using _structural_ equality here.
v1 ≟ v2
case (JoinSideName(n1), JoinSideName(n2)) => n1 ≟ n2
case (Undefined(), Undefined()) => true
case (Now(), Now()) => true
// unary
case (ExtractCentury(a1), ExtractCentury(a2)) => in.equal(a1, a2)
case (ExtractDayOfMonth(a1), ExtractDayOfMonth(a2)) => in.equal(a1, a2)
case (ExtractDecade(a1), ExtractDecade(a2)) => in.equal(a1, a2)
case (ExtractDayOfWeek(a1), ExtractDayOfWeek(a2)) => in.equal(a1, a2)
case (ExtractDayOfYear(a1), ExtractDayOfYear(a2)) => in.equal(a1, a2)
case (ExtractEpoch(a1), ExtractEpoch(a2)) => in.equal(a1, a2)
case (ExtractHour(a1), ExtractHour(a2)) => in.equal(a1, a2)
case (ExtractIsoDayOfWeek(a1), ExtractIsoDayOfWeek(a2)) => in.equal(a1, a2)
case (ExtractIsoYear(a1), ExtractIsoYear(a2)) => in.equal(a1, a2)
case (ExtractMicroseconds(a1), ExtractMicroseconds(a2)) => in.equal(a1, a2)
case (ExtractMillennium(a1), ExtractMillennium(a2)) => in.equal(a1, a2)
case (ExtractMilliseconds(a1), ExtractMilliseconds(a2)) => in.equal(a1, a2)
case (ExtractMinute(a1), ExtractMinute(a2)) => in.equal(a1, a2)
case (ExtractMonth(a1), ExtractMonth(a2)) => in.equal(a1, a2)
case (ExtractQuarter(a1), ExtractQuarter(a2)) => in.equal(a1, a2)
case (ExtractSecond(a1), ExtractSecond(a2)) => in.equal(a1, a2)
case (ExtractTimezone(a1), ExtractTimezone(a2)) => in.equal(a1, a2)
case (ExtractTimezoneHour(a1), ExtractTimezoneHour(a2)) => in.equal(a1, a2)
case (ExtractTimezoneMinute(a1), ExtractTimezoneMinute(a2)) => in.equal(a1, a2)
case (ExtractWeek(a1), ExtractWeek(a2)) => in.equal(a1, a2)
case (ExtractYear(a1), ExtractYear(a2)) => in.equal(a1, a2)
case (Date(a1), Date(b1)) => in.equal(a1, b1)
case (Time(a1), Time(b1)) => in.equal(a1, b1)
case (Timestamp(a1), Timestamp(b1)) => in.equal(a1, b1)
case (Interval(a1), Interval(b1)) => in.equal(a1, b1)
case (StartOfDay(a1), StartOfDay(b1)) => in.equal(a1, b1)
case (TemporalTrunc(a1, a2), TemporalTrunc(b1, b2)) => a1 ≟ b1 && in.equal(a2, b2)
case (TimeOfDay(a1), TimeOfDay(b1)) => in.equal(a1, b1)
case (ToTimestamp(a1), ToTimestamp(b1)) => in.equal(a1, b1)
case (TypeOf(a1), TypeOf(b1)) => in.equal(a1, b1)
case (Negate(a1), Negate(b1)) => in.equal(a1, b1)
case (Not(a1), Not(b1)) => in.equal(a1, b1)
case (Length(a1), Length(b1)) => in.equal(a1, b1)
case (Lower(a1), Lower(b1)) => in.equal(a1, b1)
case (Upper(a1), Upper(b1)) => in.equal(a1, b1)
case (Bool(a1), Bool(b1)) => in.equal(a1, b1)
case (Integer(a1), Integer(b1)) => in.equal(a1, b1)
case (Decimal(a1), Decimal(b1)) => in.equal(a1, b1)
case (Null(a1), Null(b1)) => in.equal(a1, b1)
case (ToString(a1), ToString(b1)) => in.equal(a1, b1)
case (MakeArray(a1), MakeArray(b1)) => in.equal(a1, b1)
case (Meta(a1), Meta(b1)) => in.equal(a1, b1)
// binary
case (Add(a1, a2), Add(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Multiply(a1, a2), Multiply(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Subtract(a1, a2), Subtract(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Divide(a1, a2), Divide(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Modulo(a1, a2), Modulo(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Power(a1, a2), Power(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Eq(a1, a2), Eq(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Neq(a1, a2), Neq(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Lt(a1, a2), Lt(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Lte(a1, a2), Lte(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Gt(a1, a2), Gt(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Gte(a1, a2), Gte(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (IfUndefined(a1, a2), IfUndefined(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (And(a1, a2), And(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Or(a1, a2), Or(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Within(a1, a2), Within(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (MakeMap(a1, a2), MakeMap(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (ConcatMaps(a1, a2), ConcatMaps(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (ProjectIndex(a1, a2), ProjectIndex(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (ProjectField(a1, a2), ProjectField(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (DeleteField(a1, a2), DeleteField(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (ConcatArrays(a1, a2), ConcatArrays(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Range(a1, a2), Range(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
case (Split(a1, a2), Split(b1, b2)) => in.equal(a1, b1) && in.equal(a2, b2)
// ternary
case (Between(a1, a2, a3), Between(b1, b2, b3)) => in.equal(a1, b1) && in.equal(a2, b2) && in.equal(a3, b3)
case (Cond(a1, a2, a3), Cond(b1, b2, b3)) => in.equal(a1, b1) && in.equal(a2, b2) && in.equal(a3, b3)
case (Search(a1, a2, a3), Search(b1, b2, b3)) => in.equal(a1, b1) && in.equal(a2, b2) && in.equal(a3, b3)
case (Substring(a1, a2, a3), Substring(b1, b2, b3)) => in.equal(a1, b1) && in.equal(a2, b2) && in.equal(a3, b3)
case (Guard(a1, atpe, a2, a3), Guard(b1, btpe, b2, b3)) => atpe ≟ btpe && in.equal(a1, b1) && in.equal(a2, b2) && in.equal(a3, b3)
case (_, _) => false
}
}
implicit def show[T[_[_]]: ShowT]: Delay[Show, MapFuncCore[T, ?]] =
new Delay[Show, MapFuncCore[T, ?]] {
def apply[A](sh: Show[A]): Show[MapFuncCore[T, A]] = {
def shz(label: String, a: A*) =
Cord(label) ++ Cord("(") ++ a.map(sh.show).toList.intercalate(Cord(", ")) ++ Cord(")")
Show.show {
// nullary
case Constant(v) => Cord("Constant(") ++ v.show ++ Cord(")")
case Undefined() => Cord("Undefined()")
case JoinSideName(n) => Cord("JoinSideName(") ++ n.show ++ Cord(")")
case Now() => Cord("Now()")
// unary
case ExtractCentury(a1) => shz("ExtractCentury", a1)
case ExtractDayOfMonth(a1) => shz("ExtractDayOfMonth", a1)
case ExtractDecade(a1) => shz("ExtractDecade", a1)
case ExtractDayOfWeek(a1) => shz("ExtractDayOfWeek", a1)
case ExtractDayOfYear(a1) => shz("ExtractDayOfYear", a1)
case ExtractEpoch(a1) => shz("ExtractEpoch", a1)
case ExtractHour(a1) => shz("ExtractHour", a1)
case ExtractIsoDayOfWeek(a1) => shz("ExtractIsoDayOfWeek", a1)
case ExtractIsoYear(a1) => shz("ExtractIsoYear", a1)
case ExtractMicroseconds(a1) => shz("ExtractMicroseconds", a1)
case ExtractMillennium(a1) => shz("ExtractMillennium", a1)
case ExtractMilliseconds(a1) => shz("ExtractMilliseconds", a1)
case ExtractMinute(a1) => shz("ExtractMinute", a1)
case ExtractMonth(a1) => shz("ExtractMonth", a1)
case ExtractQuarter(a1) => shz("ExtractQuarter", a1)
case ExtractSecond(a1) => shz("ExtractSecond", a1)
case ExtractTimezone(a1) => shz("ExtractTimezone", a1)
case ExtractTimezoneHour(a1) => shz("ExtractTimezoneHour", a1)
case ExtractTimezoneMinute(a1) => shz("ExtractTimezoneMinute", a1)
case ExtractWeek(a1) => shz("ExtractWeek", a1)
case ExtractYear(a1) => shz("ExtractYear", a1)
case Date(a1) => shz("Date", a1)
case Time(a1) => shz("Time", a1)
case Timestamp(a1) => shz("Timestamp", a1)
case Interval(a1) => shz("Interval", a1)
case StartOfDay(a1) => shz("StartOfDay", a1)
case TemporalTrunc(a1, a2) => Cord("TemporalTrunc(", a1.show, ", ", sh.show(a2), ")")
case TimeOfDay(a1) => shz("TimeOfDay", a1)
case ToTimestamp(a1) => shz("ToTimestamp", a1)
case TypeOf(a1) => shz("TypeOf", a1)
case Negate(a1) => shz("Negate", a1)
case Not(a1) => shz("Not", a1)
case Length(a1) => shz("Length", a1)
case Lower(a1) => shz("Lower", a1)
case Upper(a1) => shz("Upper", a1)
case Bool(a1) => shz("Bool", a1)
case Integer(a1) => shz("Integer", a1)
case Decimal(a1) => shz("Decimal", a1)
case Null(a1) => shz("Null", a1)
case ToString(a1) => shz("ToString", a1)
case MakeArray(a1) => shz("MakeArray", a1)
case Meta(a1) => shz("Meta", a1)
// binary
case Add(a1, a2) => shz("Add", a1, a2)
case Multiply(a1, a2) => shz("Multiply", a1, a2)
case Subtract(a1, a2) => shz("Subtract", a1, a2)
case Divide(a1, a2) => shz("Divide", a1, a2)
case Modulo(a1, a2) => shz("Modulo", a1, a2)
case Power(a1, a2) => shz("Power", a1, a2)
case Eq(a1, a2) => shz("Eq", a1, a2)
case Neq(a1, a2) => shz("Neq", a1, a2)
case Lt(a1, a2) => shz("Lt", a1, a2)
case Lte(a1, a2) => shz("Lte", a1, a2)
case Gt(a1, a2) => shz("Gt", a1, a2)
case Gte(a1, a2) => shz("Gte", a1, a2)
case IfUndefined(a1, a2) => shz("IfUndefined", a1, a2)
case And(a1, a2) => shz("And", a1, a2)
case Or(a1, a2) => shz("Or", a1, a2)
case Within(a1, a2) => shz("Within", a1, a2)
case MakeMap(a1, a2) => shz("MakeMap", a1, a2)
case ConcatMaps(a1, a2) => shz("ConcatMaps", a1, a2)
case ProjectIndex(a1, a2) => shz("ProjectIndex", a1, a2)
case ProjectField(a1, a2) => shz("ProjectField", a1, a2)
case DeleteField(a1, a2) => shz("DeleteField", a1, a2)
case ConcatArrays(a1, a2) => shz("ConcatArrays", a1, a2)
case Range(a1, a2) => shz("Range", a1, a2)
case Split(a1, a2) => shz("Split", a1, a2)
// ternary
case Between(a1, a2, a3) => shz("Between", a1, a2, a3)
case Cond(a1, a2, a3) => shz("Cond", a1, a2, a3)
case Search(a1, a2, a3) => shz("Search", a1, a2, a3)
case Substring(a1, a2, a3) => shz("Substring", a1, a2, a3)
case Guard(a1, tpe, a2, a3) =>
Cord("Guard(") ++
sh.show(a1) ++ Cord(", ") ++
tpe.show ++ Cord(", ") ++
sh.show(a2) ++ Cord(", ") ++
sh.show(a3) ++ Cord(")")
}
}
}
// TODO: replace this with some kind of pretty-printing based on a syntax for
// MapFunc + EJson.
implicit def renderTree[T[_[_]]: ShowT]: Delay[RenderTree, MapFuncCore[T, ?]] =
new Delay[RenderTree, MapFuncCore[T, ?]] {
val nt = "MapFuncCore" :: Nil
@SuppressWarnings(Array("org.wartremover.warts.ToString"))
def apply[A](r: RenderTree[A]): RenderTree[MapFuncCore[T, A]] = {
def nAry(typ: String, as: A*): RenderedTree =
NonTerminal(typ :: nt, None, as.toList.map(r.render(_)))
RenderTree.make {
// nullary
case Constant(a1) => Terminal("Constant" :: nt, a1.shows.some)
case Undefined() => Terminal("Undefined" :: nt, None)
          case JoinSideName(n) => Terminal("JoinSideName" :: nt, n.shows.some)
case Now() => Terminal("Now" :: nt, None)
// unary
case ExtractCentury(a1) => nAry("ExtractCentury", a1)
case ExtractDayOfMonth(a1) => nAry("ExtractDayOfMonth", a1)
case ExtractDecade(a1) => nAry("ExtractDecade", a1)
case ExtractDayOfWeek(a1) => nAry("ExtractDayOfWeek", a1)
case ExtractDayOfYear(a1) => nAry("ExtractDayOfYear", a1)
case ExtractEpoch(a1) => nAry("ExtractEpoch", a1)
case ExtractHour(a1) => nAry("ExtractHour", a1)
case ExtractIsoDayOfWeek(a1) => nAry("ExtractIsoDayOfWeek", a1)
case ExtractIsoYear(a1) => nAry("ExtractIsoYear", a1)
case ExtractMicroseconds(a1) => nAry("ExtractMicroseconds", a1)
case ExtractMillennium(a1) => nAry("ExtractMillennium", a1)
case ExtractMilliseconds(a1) => nAry("ExtractMilliseconds", a1)
case ExtractMinute(a1) => nAry("ExtractMinute", a1)
case ExtractMonth(a1) => nAry("ExtractMonth", a1)
case ExtractQuarter(a1) => nAry("ExtractQuarter", a1)
case ExtractSecond(a1) => nAry("ExtractSecond", a1)
case ExtractTimezone(a1) => nAry("ExtractTimezone", a1)
case ExtractTimezoneHour(a1) => nAry("ExtractTimezoneHour", a1)
case ExtractTimezoneMinute(a1) => nAry("ExtractTimezoneMinute", a1)
case ExtractWeek(a1) => nAry("ExtractWeek", a1)
case ExtractYear(a1) => nAry("ExtractYear", a1)
case Date(a1) => nAry("Date", a1)
case Time(a1) => nAry("Time", a1)
case Timestamp(a1) => nAry("Timestamp", a1)
case Interval(a1) => nAry("Interval", a1)
case StartOfDay(a1) => nAry("StartOfDay", a1)
case TemporalTrunc(a1, a2) => NonTerminal("TemporalTrunc" :: nt, a1.shows.some, List(r.render(a2)))
case TimeOfDay(a1) => nAry("TimeOfDay", a1)
case ToTimestamp(a1) => nAry("ToTimestamp", a1)
case TypeOf(a1) => nAry("TypeOf", a1)
case Negate(a1) => nAry("Negate", a1)
case Not(a1) => nAry("Not", a1)
case Length(a1) => nAry("Length", a1)
case Lower(a1) => nAry("Lower", a1)
case Upper(a1) => nAry("Upper", a1)
case Bool(a1) => nAry("Bool", a1)
case Integer(a1) => nAry("Integer", a1)
case Decimal(a1) => nAry("Decimal", a1)
case Null(a1) => nAry("Null", a1)
case ToString(a1) => nAry("ToString", a1)
case MakeArray(a1) => nAry("MakeArray", a1)
case Meta(a1) => nAry("Meta", a1)
// binary
case Add(a1, a2) => nAry("Add", a1, a2)
case Multiply(a1, a2) => nAry("Multiply", a1, a2)
case Subtract(a1, a2) => nAry("Subtract", a1, a2)
case Divide(a1, a2) => nAry("Divide", a1, a2)
case Modulo(a1, a2) => nAry("Modulo", a1, a2)
case Power(a1, a2) => nAry("Power", a1, a2)
case Eq(a1, a2) => nAry("Eq", a1, a2)
case Neq(a1, a2) => nAry("Neq", a1, a2)
case Lt(a1, a2) => nAry("Lt", a1, a2)
case Lte(a1, a2) => nAry("Lte", a1, a2)
case Gt(a1, a2) => nAry("Gt", a1, a2)
case Gte(a1, a2) => nAry("Gte", a1, a2)
case IfUndefined(a1, a2) => nAry("IfUndefined", a1, a2)
case And(a1, a2) => nAry("And", a1, a2)
case Or(a1, a2) => nAry("Or", a1, a2)
case Within(a1, a2) => nAry("Within", a1, a2)
case MakeMap(a1, a2) => nAry("MakeMap", a1, a2)
case ConcatMaps(a1, a2) => nAry("ConcatMaps", a1, a2)
case ProjectIndex(a1, a2) => nAry("ProjectIndex", a1, a2)
case ProjectField(a1, a2) => nAry("ProjectField", a1, a2)
case DeleteField(a1, a2) => nAry("DeleteField", a1, a2)
case ConcatArrays(a1, a2) => nAry("ConcatArrays", a1, a2)
case Range(a1, a2) => nAry("Range", a1, a2)
case Split(a1, a2) => nAry("Split", a1, a2)
// ternary
case Between(a1, a2, a3) => nAry("Between", a1, a2, a3)
case Cond(a1, a2, a3) => nAry("Cond", a1, a2, a3)
case Search(a1, a2, a3) => nAry("Search", a1, a2, a3)
case Substring(a1, a2, a3) => nAry("Substring", a1, a2, a3)
case Guard(a1, tpe, a2, a3) => NonTerminal("Guard" :: nt, None,
List(r.render(a1), tpe.render, r.render(a2), r.render(a3)))
}
}
}
}
object MapFuncsCore {
// nullary
/** A value that is statically known.
*/
@Lenses final case class Constant[T[_[_]], A](ejson: T[EJson]) extends Nullary[T, A]
/** A value that doesn’t exist. Most operations on `Undefined` should evaluate
* to `Undefined`. The exceptions are
* - [[MakeMap]] returns `{}` if either argument is `Undefined`,
* - [[MakeArray]] returns `[]` if its argument is `Undefined`,
* - [[AddMetadata]] returns the _first_ argument if the _second_ is `Undefined`,
* - [[IfUndefined]] returns the _second_ argument if the _first_ is `Undefined`, and
* - [[Cond]] evaluates normally if neither the condition nor the taken branch are `Undefined`.
*/
@Lenses final case class Undefined[T[_[_]], A]() extends Nullary[T, A]
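  // For example, per the rules above: ConcatArrays(xs, Undefined) is Undefined,
  // while MakeArray(Undefined) is [] and IfUndefined(Undefined, fallback) is fallback.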
/** A placeholder for a `JoinSide` that should never be exposed to a backend.
*/
@Lenses final case class JoinSideName[T[_[_]], A](name: Symbol) extends Nullary[T, A]
// array
@Lenses final case class Length[T[_[_]], A](a1: A) extends Unary[T, A]
// date
// See https://www.postgresql.org/docs/9.2/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
@Lenses final case class ExtractCentury[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractDayOfMonth[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractDecade[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractDayOfWeek[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractDayOfYear[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractEpoch[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractHour[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractIsoDayOfWeek[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractIsoYear[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractMicroseconds[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractMillennium[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractMilliseconds[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractMinute[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractMonth[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractQuarter[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractSecond[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractTimezone[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractTimezoneHour[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractTimezoneMinute[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractWeek[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ExtractYear[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Date[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Time[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Timestamp[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Interval[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class StartOfDay[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class TemporalTrunc[T[_[_]], A](part: TemporalPart, a1: A) extends Unary[T, A]
@Lenses final case class TimeOfDay[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ToTimestamp[T[_[_]], A](a1: A) extends Unary[T, A]
/** Fetches the [[quasar.Type.Timestamp]] for the current instant in time. */
@Lenses final case class Now[T[_[_]], A]() extends Nullary[T, A]
// identity
/** Returns a string describing the type of the value. If the value has a
* metadata map containing an "_ejson.type" entry, that value is returned.
* Otherwise, it returns a string naming a [[quasar.common.PrimaryType]].
*/
@Lenses final case class TypeOf[T[_[_]], A](a1: A) extends Unary[T, A]
// math
@Lenses final case class Negate[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Add[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Multiply[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Subtract[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Divide[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Modulo[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Power[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
// relations
@Lenses final case class Not[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Eq[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Neq[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Lt[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Lte[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Gt[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Gte[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
/** This “catches” [[Undefined]] values and replaces them with a value.
*/
@Lenses final case class IfUndefined[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class And[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Or[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Between[T[_[_]], A](a1: A, a2: A, a3: A) extends Ternary[T, A]
@Lenses final case class Cond[T[_[_]], A](cond: A, then_ : A, else_ : A) extends Ternary[T, A] {
def a1 = cond
def a2 = then_
def a3 = else_
}
// set
@Lenses final case class Within[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
// string
@Lenses final case class Lower[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Upper[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Bool[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Integer[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Decimal[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Null[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class ToString[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Split[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class Search[T[_[_]], A](a1: A, a2: A, a3: A) extends Ternary[T, A]
@Lenses final case class Substring[T[_[_]], A](string: A, from: A, count: A) extends Ternary[T, A] {
def a1 = string
def a2 = from
def a3 = count
}
// structural
/** Makes a single-element [[ejson.Arr]] containing `a1`.
*/
@Lenses final case class MakeArray[T[_[_]], A](a1: A) extends Unary[T, A]
/** Makes a single-element [[ejson.Map]] with key `key` and value `value`.
*/
@Lenses final case class MakeMap[T[_[_]], A](key: A, value: A) extends Binary[T, A] {
def a1 = key
def a2 = value
}
@Lenses final case class ConcatArrays[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class ConcatMaps[T[_[_]], A](a1: A, a2: A) extends Binary[T, A]
@Lenses final case class ProjectIndex[T[_[_]], A](src: A, index: A) extends Binary[T, A] {
def a1 = src
def a2 = index
}
@Lenses final case class ProjectField[T[_[_]], A](src: A, field: A) extends Binary[T, A] {
def a1 = src
def a2 = field
}
@Lenses final case class DeleteField[T[_[_]], A](src: A, field: A) extends Binary[T, A] {
def a1 = src
def a2 = field
}
@Lenses final case class Meta[T[_[_]], A](a1: A) extends Unary[T, A]
@Lenses final case class Range[T[_[_]], A](from: A, to: A) extends Binary[T, A] {
def a1 = from
def a2 = to
}
/** A conditional specifically for checking that `a1` satisfies `pattern`.
*/
@Lenses final case class Guard[T[_[_]], A](a1: A, pattern: Type, a2: A, a3: A)
extends Ternary[T, A]
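  // Sketch: Guard(x, tpe, ifMatches, otherwise) evaluates to `ifMatches` when `x`
  // conforms to `tpe` and to `otherwise` when it does not; it is typically built
  // with an Undefined else-branch, the shape that extractGuards and
  // extractFilter in MapFuncCore recognize and hoist.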
object NullLit {
def apply[T[_[_]]: CorecursiveT, A](): FreeMapA[T, A] =
Free.roll(MFC(Constant[T, FreeMapA[T, A]](EJson.fromCommon(ejson.Null[T[EJson]]()))))
def unapply[T[_[_]]: RecursiveT, A](mf: FreeMapA[T, A]): Boolean =
mf.resume.fold ({
case MFC(Constant(ej)) => EJson.isNull(ej)
case _ => false
}, _ => false)
}
object BoolLit {
def apply[T[_[_]]: CorecursiveT, A](b: Boolean): FreeMapA[T, A] =
Free.roll(MFC(Constant[T, FreeMapA[T, A]](EJson.fromCommon(ejson.Bool[T[EJson]](b)))))
def unapply[T[_[_]]: RecursiveT, A](mf: FreeMapA[T, A]): Option[Boolean] =
mf.resume.fold ({
case MFC(Constant(ej)) => CommonEJson.prj(ej.project).flatMap {
case ejson.Bool(b) => b.some
case _ => None
}
case _ => None
}, _ => None)
}
object DecLit {
def apply[T[_[_]]: CorecursiveT, A](d: BigDecimal): FreeMapA[T, A] =
Free.roll(MFC(Constant[T, FreeMapA[T, A]](EJson.fromCommon(ejson.Dec[T[EJson]](d)))))
}
object IntLit {
def apply[T[_[_]]: CorecursiveT, A](i: BigInt): FreeMapA[T, A] =
Free.roll(MFC(Constant[T, FreeMapA[T, A]](EJson.fromExt(ejson.Int[T[EJson]](i)))))
def unapply[T[_[_]]: RecursiveT, A](mf: FreeMapA[T, A]): Option[BigInt] =
mf.resume.fold(IntLitMapFunc.unapply(_), _ => None)
}
object IntLitMapFunc {
def unapply[T[_[_]]: RecursiveT, A](mf: MapFunc[T, A]): Option[BigInt] =
mf match {
case MFC(Constant(ej)) => ExtEJson.prj(ej.project).flatMap {
case ejson.Int(i) => i.some
case _ => None
}
case _ => None
}
}
object StrLit {
def apply[T[_[_]]: CorecursiveT, A](str: String): FreeMapA[T, A] =
Free.roll(MFC(Constant[T, FreeMapA[T, A]](EJson.fromCommon(ejson.Str[T[EJson]](str)))))
def unapply[T[_[_]]: RecursiveT, A](mf: FreeMapA[T, A]):
Option[String] =
mf.resume.fold({
case MFC(Constant(ej)) => CommonEJson.prj(ej.project).flatMap {
case ejson.Str(str) => str.some
case _ => None
}
case _ => None
}, {
_ => None
})
}
}
| drostron/quasar | connector/src/main/scala/quasar/qscript/MapFuncCore.scala | Scala | apache-2.0 | 46,550 |
package com.criteo.vizatra.vizsql.js
import com.criteo.vizatra.vizsql.js.common._
import com.criteo.vizatra.vizsql.{DB, Query, VizSQL}
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.annotation.JSExport
@JSExport("QueryParser")
object QueryParser {
@JSExport
def parse(query: String, db: DB): ParseResult =
VizSQL.parseQuery(query, db) match {
case Left(err) => new ParseResult(new ParseError(err.msg, err.pos))
case Right(query) => convert(query)
}
def convert(query: Query): ParseResult = {
val select = query.select
val db = query.db
val result = for {
columns <- select.getColumns(db).right
tables <- select.getTables(db).right
} yield (columns, tables)
result fold (
err => new ParseResult(new ParseError(err.msg, err.pos)), { case (columns, tables) =>
val cols = columns map Column.from
val tbls = tables map { case (maybeSchema, table) => Table.from(table, maybeSchema) }
new ParseResult(select = new Select(cols.toJSArray, tbls.toJSArray))
}
)
}
}
| criteo/vizsql | js/src/main/scala/com/criteo/vizatra/vizsql/js/QueryParser.scala | Scala | apache-2.0 | 1,075 |
package org.jetbrains.plugins.scala
package lang
package resolve
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.caches.CachesUtil
import org.jetbrains.plugins.scala.extensions.{PsiElementExt, PsiMethodExt, PsiNamedElementExt}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSelfTypeElement, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScPrimaryConstructor}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameters}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScExtendsBlock, ScTemplateBody}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScTemplateDefinition}
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.{createExpressionFromText, createParameterFromText}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.implicits.ImplicitResolveResult.ResolverStateBuilder
import org.jetbrains.plugins.scala.lang.psi.implicits.{ImplicitResolveResult, ScImplicitlyConvertible}
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression._
import org.jetbrains.plugins.scala.lang.psi.types.api.UndefinedType
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.{ScDesignatorType, ScProjectionType}
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{ScMethodType, ScTypePolymorphicType}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.types.{ScSubstitutor, ScType, ScalaType}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.processor.DynamicResolveProcessor._
import org.jetbrains.plugins.scala.lang.resolve.processor._
import org.jetbrains.plugins.scala.project.ProjectContext
import scala.annotation.tailrec
import scala.collection.Set
import scala.collection.mutable.ArrayBuffer
import scala.language.implicitConversions
class ReferenceExpressionResolver(implicit projectContext: ProjectContext) {
private case class ContextInfo(arguments: Option[Seq[Expression]], expectedType: () => Option[ScType], isUnderscore: Boolean)
private def argumentsOf(ref: PsiElement): Seq[Expression] = {
ref.getContext match {
case infixExpr: ScInfixExpr =>
//TODO should rOp really be parsed as Tuple (not as argument list)?
infixExpr.rOp match {
case t: ScTuple => t.exprs
case op => Seq(op)
}
case methodCall: ScMethodCall => methodCall.argumentExpressions
}
}
private def getContextInfo(ref: ScReferenceExpression, e: ScExpression): ContextInfo = {
e.getContext match {
case generic : ScGenericCall => getContextInfo(ref, generic)
case call: ScMethodCall if !call.isUpdateCall =>
ContextInfo(Some(call.argumentExpressions), () => call.expectedType(), isUnderscore = false)
case call: ScMethodCall =>
val args = call.argumentExpressions ++ call.getContext.asInstanceOf[ScAssignStmt].getRExpression.toList
ContextInfo(Some(args), () => None, isUnderscore = false)
case section: ScUnderscoreSection => ContextInfo(None, () => section.expectedType(), isUnderscore = true)
case inf: ScInfixExpr if ref == inf.operation =>
ContextInfo(inf.getArgExpr match {
case tuple: ScTuple => Some(tuple.exprs) // See SCL-2001
case _: ScUnitExpr => Some(Nil) // See SCL-3485
case e: ScParenthesisedExpr => e.expr match {
case Some(expr) => Some(Seq(expr))
case _ => Some(Nil)
}
case rOp => Some(Seq(rOp))
}, () => None, isUnderscore = false)
case parents: ScParenthesisedExpr => getContextInfo(ref, parents)
case postf: ScPostfixExpr if ref == postf.operation => getContextInfo(ref, postf)
case pref: ScPrefixExpr if ref == pref.operation => getContextInfo(ref, pref)
case _ => ContextInfo(None, () => e.expectedType(), isUnderscore = false)
}
}
private def kinds(ref: ScReferenceExpression, e: ScExpression, incomplete: Boolean): scala.collection.Set[ResolveTargets.Value] = {
e.getContext match {
case gen: ScGenericCall => kinds(ref, gen, incomplete)
case parents: ScParenthesisedExpr => kinds(ref, parents, incomplete)
case _: ScMethodCall | _: ScUnderscoreSection => StdKinds.methodRef
case inf: ScInfixExpr if ref == inf.operation => StdKinds.methodRef
case postf: ScPostfixExpr if ref == postf.operation => StdKinds.methodRef
case pref: ScPrefixExpr if ref == pref.operation => StdKinds.methodRef
case _ => ref.getKinds(incomplete)
}
}
private def getTypeArgs(e : ScExpression) : Seq[ScTypeElement] = {
e.getContext match {
case generic: ScGenericCall => generic.arguments
case parents: ScParenthesisedExpr => getTypeArgs(parents)
case _ => Seq.empty
}
}
def resolve(reference: ScReferenceExpression, shapesOnly: Boolean, incomplete: Boolean): Array[ResolveResult] = {
val name = if (reference.isUnaryOperator) "unary_" + reference.refName else reference.refName
val info = getContextInfo(reference, reference)
    // expectedOption differs between the case
    //   val a: (Int) => Int = foo
    // and the case
    //   val a: (Int) => Int = _.foo
val expectedOption = () => info.expectedType.apply()
val prevInfoTypeParams = reference.getPrevTypeInfoParams
def processor(smartProcessor: Boolean): MethodResolveProcessor =
new MethodResolveProcessor(reference, name, info.arguments.toList,
getTypeArgs(reference), prevInfoTypeParams, kinds(reference, reference, incomplete), expectedOption,
info.isUnderscore, shapesOnly, enableTupling = true) {
override def candidatesS: Set[ScalaResolveResult] = {
if (!smartProcessor) super.candidatesS
else {
val iterator = reference.shapeResolve.map(_.asInstanceOf[ScalaResolveResult]).iterator
while (iterator.hasNext) {
levelSet.add(iterator.next())
}
super.candidatesS
}
}
}
var result: Array[ResolveResult] = Array.empty
if (shapesOnly) {
result = doResolve(reference, processor(smartProcessor = false))
} else {
val candidatesS = processor(smartProcessor = true).candidatesS //let's try to avoid treeWalkUp
if (candidatesS.isEmpty || candidatesS.forall(!_.isApplicable())) {
        // it has another resolve only in one case:
        //   clazz.ref(expr)
        // clazz has a method `ref` with one argument, but it is not applicable,
        // so shape resolve returns this wrong result;
        // however, there is an implicit conversion with the right argument.
        // this is ugly, but it can improve performance
result = doResolve(reference, processor(smartProcessor = false))
} else {
result = candidatesS.toArray
}
}
if (result.isEmpty && reference.isAssignmentOperator) {
val assignProcessor = new MethodResolveProcessor(reference, reference.refName.init, List(argumentsOf(reference)),
Nil, prevInfoTypeParams, isShapeResolve = shapesOnly, enableTupling = true)
result = doResolve(reference, assignProcessor)
result.map(r => r.asInstanceOf[ScalaResolveResult].copy(isAssignment = true): ResolveResult)
} else {
result
}
}
def doResolve(ref: ScReferenceExpression, processor: BaseProcessor, accessibilityCheck: Boolean = true): Array[ResolveResult] = {
    def resolveUnqualified(processor: BaseProcessor): BaseProcessor = {
ref.getContext match {
case ScSugarCallExpr(operand, operation, _) if ref == operation =>
processTypes(operand, processor)
case _ =>
resolveUnqualifiedExpression(processor)
processor
}
}
def resolveUnqualifiedExpression(processor: BaseProcessor) {
@tailrec
def treeWalkUp(place: PsiElement, lastParent: PsiElement) {
if (place == null) return
if (!place.processDeclarations(processor,
ResolveState.initial(),
lastParent, ref)) return
place match {
case (_: ScTemplateBody | _: ScExtendsBlock) => //template body and inherited members are at the same level
case _ => if (!processor.changedLevel) return
}
treeWalkUp(place.getContext, place)
}
val context = ref.getContext
val contextElement = (context, processor) match {
case (x: ScAssignStmt, _) if x.getLExpression == ref => Some(context)
case (_, cp: CompletionProcessor) if cp.isIncomplete => Some(ref)
case _ => None
}
contextElement.foreach(processAssignment(_, processor))
treeWalkUp(ref, null)
}
def processAssignment(assign: PsiElement, processor: BaseProcessor) {
assign.getContext match {
        // trying to resolve a named parameter
case args: ScArgumentExprList =>
args.callReference match {
case Some(callReference) if args.getContext.isInstanceOf[MethodInvocation] =>
processAnyAssignment(args.exprs, args.getContext.asInstanceOf[MethodInvocation], callReference,
args.invocationCount, assign, processor)
case None => processConstructorReference(args, assign, processor)
}
case tuple: ScTuple => tuple.getContext match {
case inf: ScInfixExpr if inf.getArgExpr == tuple =>
processAnyAssignment(tuple.exprs, inf, inf.operation, 1, assign, processor)
case _ =>
}
case p: ScParenthesisedExpr => p.getContext match {
case inf: ScInfixExpr if inf.getArgExpr == p =>
processAnyAssignment(p.expr.toSeq, inf, inf.operation, 1, assign, processor)
case _ =>
}
case _ =>
}
}
def processAnyAssignment(exprs: Seq[ScExpression], call: MethodInvocation, callReference: ScReferenceExpression, invocationCount: Int,
assign: PsiElement, processor: BaseProcessor) {
val refName = ref.refName
for (variant <- callReference.multiResolve(false)) {
def processResult(r: ScalaResolveResult) = r match {
case ScalaResolveResult(fun: ScFunction, _) if r.isDynamic &&
fun.name == APPLY_DYNAMIC_NAMED =>
//add synthetic parameter
if (!processor.isInstanceOf[CompletionProcessor]) {
val state: ResolveState = ResolveState.initial().put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
processor.execute(createParameterFromText(refName + ": Any"), state)
}
case ScalaResolveResult(_, _) if call.applyOrUpdateElement.exists(_.isDynamic) &&
call.applyOrUpdateElement.get.name == APPLY_DYNAMIC_NAMED =>
//add synthetic parameter
if (!processor.isInstanceOf[CompletionProcessor]) {
val state: ResolveState = ResolveState.initial().put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
processor.execute(createParameterFromText(refName + ": Any"), state)
}
case ScalaResolveResult(fun: ScFunction, subst: ScSubstitutor) =>
if (!processor.isInstanceOf[CompletionProcessor]) {
fun.getParamByName(refName, invocationCount - 1) match {
//todo: why -1?
case Some(param) =>
var state = ResolveState.initial.put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
if (!ScalaNamesUtil.equivalent(param.name, refName)) {
state = state.put(ResolverEnv.nameKey, ScalaNamesUtil.clean(param.deprecatedName.get))
}
processor.execute(param, state)
case None =>
}
} else {
//for completion only!
funCollectNamedCompletions(fun.paramClauses, assign, processor, subst, exprs, invocationCount)
}
case ScalaResolveResult(_: FakePsiMethod, _: ScSubstitutor) => //todo: ?
case ScalaResolveResult(method: PsiMethod, subst) =>
assign.getContext match {
case args: ScArgumentExprList =>
args.getContext match {
case methodCall: ScMethodCall if methodCall.isNamedParametersEnabledEverywhere =>
method.parameters.foreach {
p =>
processor.execute(p, ResolveState.initial().put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE))
}
case _ =>
}
case _ =>
}
case _ =>
}
variant match {
case x: ScalaResolveResult =>
processResult(x)
// Consider named parameters of apply method; see SCL-2407
x.innerResolveResult.foreach(processResult)
case _ =>
}
}
}
def processConstructorReference(args: ScArgumentExprList, assign: PsiElement, baseProcessor: BaseProcessor) {
def processConstructor(elem: PsiElement, tp: ScType, typeArgs: Seq[ScTypeElement], arguments: Seq[ScArgumentExprList],
secondaryConstructors: (ScClass) => Seq[ScFunction]) {
tp.extractClassType match {
case Some((clazz, subst)) if !clazz.isInstanceOf[ScTemplateDefinition] && clazz.isAnnotationType =>
if (!baseProcessor.isInstanceOf[CompletionProcessor]) {
for (method <- clazz.getMethods) {
method match {
case p: PsiAnnotationMethod =>
if (ScalaNamesUtil.equivalent(p.name, ref.refName)) {
baseProcessor.execute(p, ResolveState.initial)
}
case _ =>
}
}
} else {
if (args.invocationCount == 1) {
val methods: ArrayBuffer[PsiAnnotationMethod] = new ArrayBuffer[PsiAnnotationMethod] ++
clazz.getMethods.toSeq.flatMap {
case f: PsiAnnotationMethod => Seq(f)
case _ => Seq.empty
}
val exprs = args.exprs
var i = 0
def tail() {
if (methods.nonEmpty) methods.remove(0)
}
while (exprs(i) != assign) {
exprs(i) match {
case assignStmt: ScAssignStmt =>
assignStmt.getLExpression match {
case ref: ScReferenceExpression =>
val ind = methods.indexWhere(p => ScalaNamesUtil.equivalent(p.name, ref.refName))
if (ind != -1) methods.remove(ind)
else tail()
case _ => tail()
}
case _ => tail()
}
i = i + 1
}
for (method <- methods) {
baseProcessor.execute(method, ResolveState.initial.put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE))
}
}
}
case Some((clazz, subst)) =>
val processor: MethodResolveProcessor = new MethodResolveProcessor(elem, "this",
arguments.toList.map(_.exprs.map(Expression(_))), typeArgs, Seq.empty /* todo: ? */ ,
constructorResolve = true, enableTupling = true)
val state = ResolveState.initial.put(ScSubstitutor.key, subst)
clazz match {
case clazz: ScClass =>
for (constr <- secondaryConstructors(clazz)) {
processor.execute(constr, state)
}
clazz.constructor.foreach(processor.execute(_, state))
case _ =>
for (constr <- clazz.getConstructors) {
processor.execute(constr, state)
}
}
val refName = ref.refName
for (candidate <- processor.candidatesS) {
candidate match {
case ScalaResolveResult(fun: ScFunction, subst: ScSubstitutor) =>
if (!baseProcessor.isInstanceOf[CompletionProcessor]) {
fun.getParamByName(refName, arguments.indexOf(args)) match {
case Some(param) =>
var state = ResolveState.initial.put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
if (!ScalaNamesUtil.equivalent(param.name, refName)) {
state = state.put(ResolverEnv.nameKey, ScalaNamesUtil.clean(param.deprecatedName.get))
}
baseProcessor.execute(param, state)
case None =>
}
} else {
//for completion only!
funCollectNamedCompletions(fun.paramClauses, assign, baseProcessor, subst, args.exprs, args.invocationCount)
}
case ScalaResolveResult(constructor: ScPrimaryConstructor, _) =>
if (!baseProcessor.isInstanceOf[CompletionProcessor])
constructor.getParamByName(refName, arguments.indexOf(args)) match {
case Some(param) =>
baseProcessor.execute(param, ResolveState.initial.put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE))
case None =>
}
else {
//for completion only!
funCollectNamedCompletions(constructor.parameterList, assign, baseProcessor, subst, args.exprs, args.invocationCount)
}
case _ =>
}
}
case _ =>
}
}
args.getContext match {
case s: ScSelfInvocation =>
val clazz = ScalaPsiUtil.getContextOfType(s, true, classOf[ScClass])
if (clazz == null) return
val tp: ScType = clazz.asInstanceOf[ScClass].getType(TypingContext.empty).getOrElse(return)
val typeArgs: Seq[ScTypeElement] = Seq.empty
val arguments = s.arguments
val secondaryConstructors = (c: ScClass) => {
if (c != clazz) Seq.empty
else {
c.secondaryConstructors.filter(f =>
!PsiTreeUtil.isContextAncestor(f, s, true) &&
f.getTextRange.getStartOffset < s.getTextRange.getStartOffset
)
}
}
processConstructor(s, tp, typeArgs, arguments, secondaryConstructors)
case constr: ScConstructor =>
val tp: ScType = constr.typeElement.getType(TypingContext.empty).getOrElse(return)
val typeArgs: Seq[ScTypeElement] = constr.typeArgList.map(_.typeArgs).getOrElse(Seq())
val arguments = constr.arguments
val secondaryConstructors = (clazz: ScClass) => clazz.secondaryConstructors
processConstructor(constr, tp, typeArgs, arguments, secondaryConstructors)
case _ =>
}
}
def funCollectNamedCompletions(clauses: ScParameters, assign: PsiElement, processor: BaseProcessor,
subst: ScSubstitutor, exprs: Seq[ScExpression], invocationCount: Int) {
if (clauses.clauses.length >= invocationCount) {
val actualClause = clauses.clauses(invocationCount - 1)
val params = new ArrayBuffer[ScParameter] ++ actualClause.parameters
var i = 0
def tail() {
if (params.nonEmpty) params.remove(0)
}
while (exprs(i) != assign) {
exprs(i) match {
case assignStmt: ScAssignStmt =>
assignStmt.getLExpression match {
case ref: ScReferenceExpression =>
val ind = params.indexWhere(p => ScalaNamesUtil.equivalent(p.name, ref.refName))
if (ind != -1) params.remove(ind)
else tail()
case _ => tail()
}
case _ => tail()
}
i = i + 1
}
for (param <- params) {
processor.execute(param, ResolveState.initial.put(ScSubstitutor.key, subst).
put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE))
}
}
}
def processTypes(e: ScExpression, processor: BaseProcessor): BaseProcessor = {
ProgressManager.checkCanceled()
e.getNonValueType() match {
case Success(ScTypePolymorphicType(internal, tp), _) if tp.nonEmpty &&
!internal.isInstanceOf[ScMethodType] && !internal.isInstanceOf[UndefinedType] /* optimization */ =>
processType(internal, e, processor)
if (processor.candidates.nonEmpty) return processor
case _ =>
}
//if it's ordinary case
e.getType().toOption match {
case Some(tp) => processType(tp, e, processor)
case _ => processor
}
}
def processType(aType: ScType, e: ScExpression, processor: BaseProcessor): BaseProcessor = {
val shape = processor match {
case m: MethodResolveProcessor => m.isShapeResolve
case _ => false
}
val fromType = e match {
case ref: ScReferenceExpression => ref.bind() match {
case Some(ScalaResolveResult(_: ScSelfTypeElement, _)) => aType
case Some(r@ScalaResolveResult(b: ScTypedDefinition, _)) if b.isStable =>
r.fromType match {
case Some(fT) => ScProjectionType(fT, b, superReference = false)
case None => ScalaType.designator(b)
}
case _ => aType
}
case _ => aType
}
var state = ResolveState.initial()
fromType match {
case ScDesignatorType(_: PsiPackage) =>
case _ =>
state = state.put(BaseProcessor.FROM_TYPE_KEY, fromType)
}
processor.processType(aType, e, state)
val candidates = processor.candidatesS
aType match {
case d: ScDesignatorType if d.isStatic => return processor
case ScDesignatorType(_: PsiPackage) => return processor
case _ =>
}
if (candidates.isEmpty || (!shape && candidates.forall(!_.isApplicable())) ||
(processor.isInstanceOf[CompletionProcessor] &&
processor.asInstanceOf[CompletionProcessor].collectImplicits)) {
processor match {
case rp: ResolveProcessor =>
            rp.resetPrecedence() // do not clear the candidate set: if nothing else is found, we still want the (wrong) resolve result
case _ =>
}
collectImplicits(e, processor, noImplicitsForArgs = candidates.nonEmpty)
if (processor.candidates.length == 0)
return processDynamic(fromType, e, processor)
}
processor
}
def processDynamic(`type`: ScType, e: ScExpression, baseProcessor: BaseProcessor): BaseProcessor =
ScalaPsiManager.instance(ref.getProject).getCachedClass(ref.resolveScope, "scala.Dynamic").map {
ScDesignatorType(_)
}.filter {
`type`.conforms(_)
}.flatMap { _ =>
Option(baseProcessor).collect {
case processor: MethodResolveProcessor => processor
}.map { processor =>
val callOption = ref.getContext match {
case m: MethodInvocation if m.getInvokedExpr == ref => Some(m)
case _ => None
}
val argumentExpressions = callOption.toSeq.flatMap {
_.argumentExpressions
}
val name = callOption.map {
getDynamicNameForMethodInvocation
}.getOrElse {
ref.getContext match {
case a: ScAssignStmt if a.getLExpression == ref => UPDATE_DYNAMIC
case _ => SELECT_DYNAMIC
}
}
val emptyStringExpression = createExpressionFromText("\\"\\"")(e.getManager)
val newProcessor = new MethodResolveProcessor(e, name, List(List(emptyStringExpression), argumentExpressions),
processor.typeArgElements, processor.prevTypeInfo, processor.kinds, processor.expectedOption,
processor.isUnderscore, processor.isShapeResolve, processor.constructorResolve, processor.noImplicitsForArgs,
processor.enableTupling, processor.selfConstructorResolve, isDynamic = true)
newProcessor.processType(`type`, e, ResolveState.initial.put(BaseProcessor.FROM_TYPE_KEY, `type`))
newProcessor
}
}.getOrElse(baseProcessor)
def collectImplicits(e: ScExpression, processor: BaseProcessor, noImplicitsForArgs: Boolean) {
def builder(result: ImplicitResolveResult): ResolverStateBuilder = {
ProgressManager.checkCanceled()
new ImplicitResolveResult.ResolverStateBuilder(result).withImports
.withImplicitType
.withImplicitFunction
}
processor match {
case _: CompletionProcessor =>
new ScImplicitlyConvertible(e).implicitMap().foreach { result =>
//todo: args?
val state = builder(result).state
processor.processType(result.`type`, e, state)
}
return
case m: MethodResolveProcessor => m.noImplicitsForArgs = true
case _ =>
}
val name = processor match {
case rp: ResolveProcessor => rp.name // See SCL-2934.
case _ => ref.refName
}
ScalaPsiUtil.findImplicitConversion(e, name, ref, processor, noImplicitsForArgs).foreach { result =>
val state = builder(result).withType.state
processor.processType(result.typeWithDependentSubstitutor, e, state)
}
}
if (!accessibilityCheck) processor.doNotCheckAccessibility()
val actualProcessor = ref.qualifier match {
case None =>
        resolveUnqualified(processor)
case Some(superQ: ScSuperReference) =>
ResolveUtils.processSuperReference(superQ, processor, ref)
processor
case Some(q) =>
processTypes(q, processor)
}
val res = actualProcessor.rrcandidates
if (accessibilityCheck && res.length == 0) return doResolve(ref, processor, accessibilityCheck = false)
res
}
} | loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/resolve/ReferenceExpressionResolver.scala | Scala | apache-2.0 | 26,998 |
package is.hail.types.physical
import is.hail.annotations.{CodeOrdering, Region, UnsafeOrdering}
import is.hail.asm4s._
import is.hail.expr.ir.{EmitCodeBuilder, EmitMethodBuilder}
import is.hail.types.physical.stypes.SCode
import is.hail.types.physical.stypes.concrete.{SCanonicalCall, SCanonicalCallCode}
import is.hail.types.physical.stypes.interfaces.SCall
import is.hail.utils._
final case class PCanonicalCall(required: Boolean = false) extends PCall {
def _asIdent = "call"
override def _pretty(sb: StringBuilder, indent: Int, compact: Boolean): Unit = sb.append("PCCall")
val representation: PInt32 = PInt32(required)
def byteSize: Long = representation.byteSize
override def alignment: Long = representation.alignment
override lazy val fundamentalType: PInt32 = representation
override def unsafeOrdering(): UnsafeOrdering = representation.unsafeOrdering() // this was a terrible idea
def codeOrdering(mb: EmitMethodBuilder[_], other: PType): CodeOrdering = {
assert(other isOfType this)
PInt32().codeOrdering(mb)
}
def setRequired(required: Boolean) = if (required == this.required) this else PCanonicalCall(required)
override def unstagedStoreAtAddress(addr: Long, region: Region, srcPType: PType, srcAddress: Long, deepCopy: Boolean): Unit = {
srcPType match {
case pt: PCanonicalCall =>
representation.unstagedStoreAtAddress(addr, region, pt.representation, srcAddress, deepCopy)
}
}
override def encodableType: PType = representation.encodableType
override def containsPointers: Boolean = representation.containsPointers
def _copyFromAddress(region: Region, srcPType: PType, srcAddress: Long, deepCopy: Boolean): Long = {
srcPType match {
      case pt: PCanonicalCall => representation._copyFromAddress(region, pt.representation, srcAddress, deepCopy)
}
}
def sType: SCall = SCanonicalCall(required)
def loadCheapPCode(cb: EmitCodeBuilder, addr: Code[Long]): PCode = new SCanonicalCallCode(required, Region.loadInt(addr))
def store(cb: EmitCodeBuilder, region: Value[Region], value: SCode, deepCopy: Boolean): Code[Long] = {
value.st match {
case SCanonicalCall(r) =>
val newAddr = cb.newLocal[Long]("pcanonicalcall_store_addr", region.allocate(representation.alignment, representation.byteSize))
storeAtAddress(cb, newAddr, region, value, deepCopy)
newAddr
}
}
def storeAtAddress(cb: EmitCodeBuilder, addr: Code[Long], region: Value[Region], value: SCode, deepCopy: Boolean): Unit = {
cb += Region.storeInt(addr, value.asInstanceOf[SCanonicalCallCode].call)
}
}
| danking/hail | hail/src/main/scala/is/hail/types/physical/PCanonicalCall.scala | Scala | mit | 2,618 |
package com.azavea.opentransit.indicators.parameters
import com.azavea.opentransit.database.{ BoundariesTable, RoadsTable }
import scala.slick.jdbc.JdbcBackend.{Database, DatabaseDef, Session}
import com.azavea.gtfs.io.database._
import com.azavea.gtfs._
import com.azavea.opentransit._
import com.azavea.opentransit.indicators._
import com.azavea.gtfs.{TransitSystem, Stop}
import scala.collection.mutable
import grizzled.slf4j.Logging
/**
* Trait used to populate parameters with data from 'real time' GTFS
*/
trait ObservedStopTimes {
// Map of Trip IDs to Sequence of tuples of (scheduled, observed)
def observedStopsByTrip(tripId: String): Seq[(ScheduledStop, ScheduledStop)]
def observedTripById(tripId: String): Trip
def missingTripData: Int
}
object ObservedStopTimes {
def apply(
scheduledSystem: TransitSystem,
period: SamplePeriod,
db: DatabaseDef,
hasObserved: Boolean): ObservedStopTimes = {
    // This is ugly: a thousand sorries. It also is apparently necessary -
// we have to index on SamplePeriod and again on trip id
lazy val observedSystem = {
val observedGtfsRecords =
db withSession { implicit session =>
new DatabaseGtfsRecords with DefaultProfile {
override val stopTimesTableName = "gtfs_stop_times_real"
}
}
val builder = TransitSystemBuilder(observedGtfsRecords)
      // pruneStopsBufferMinutes is an optional parameter (default=0) which specifies
      // just how far a trip's stops can extend beyond the sample period
builder.systemBetween(period.start, period.end, pruneStopsBufferMinutes=120)
}
lazy val observedTrips: Map[String, Trip] =
observedSystem.routes.flatMap { route =>
/* This line suffers from a problematic bug such that if a sample period is too long (>24 hours),
* ambiguity CAN exist between tripIds and trip objects. The practical effect of this is that
* the wrong trip object can be selected. A fix for this would be the inclusion of more
* robust notion of identity for trip instances be introduced. The GTFS parser could perhaps
* include such data - or - it is possible that a tuple of
* (tripId, trip.stop.scheduledstops.head.arrivalTime) be used for indexing these trip instances.
* See issue #566: https://github.com/WorldBank-Transport/open-transit-indicators/issues/566
* TODO: Introduce temporally robust trip instance indexing
*/
route.trips.map { trip => (trip.id -> trip) }
}.toMap
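    // Hypothetical sketch of the "temporally robust trip instance indexing" suggested above
    // (not used elsewhere in this file; assumes ScheduledStop exposes an arrival time).
    // Keying by (tripId, first arrival time) keeps two instances of the same trip id in a
    // long sample period from colliding:
    //
    //   lazy val observedTripInstances =
    //     observedSystem.routes.flatMap { route =>
    //       route.trips.map(trip => (trip.id, trip.schedule.head.arrivalTime) -> trip)
    //     }.toMap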
var missingTrips: Int = 0
lazy val observedStops: Map[String, Seq[(ScheduledStop, ScheduledStop)]] = {
val scheduledTrips = scheduledSystem.routes.flatMap(_.trips)
val observedTripsById = observedTrips
scheduledTrips.map { trip =>
(trip.id -> {
val schedStops: Map[String, ScheduledStop] =
trip.schedule.map(sst => sst.stop.id -> sst).toMap
val obsvdStops: Map[String, ScheduledStop] =
// allow for scheduled trips not in observed data
observedTripsById.get(trip.id) match {
case Some(observed) => observed.schedule.map(ost => ost.stop.id -> ost).toMap
case None => {
val tripId = trip.id.toString
missingTrips = missingTrips + 1
println(s"Missing observed stop times for trip ${tripId}")
Map()
}
}
// only return stops that are in the observed data
for {
s <- trip.schedule
if !obsvdStops.get(s.stop.id).isEmpty
} yield (schedStops(s.stop.id), obsvdStops(s.stop.id))
}) // Seq[(String, Seq[(ScheduledStop, ScheduledStop)])]
}.toMap
} // Map[String, Seq[(ScheduledStop, ScheduledStop)]])]
if (hasObserved) {
new ObservedStopTimes {
def observedStopsByTrip(tripId: String): Seq[(ScheduledStop, ScheduledStop)] =
observedStops.get(tripId) match {
case Some(s) => s
case None => Nil
}
def observedTripById(tripId: String): Trip =
observedTrips.get(tripId) match {
case Some(t) => t
case None => new Trip {
def headsign = None
def direction = None
def id = tripId
def schedule = Nil
def tripShape = None
}
}
def missingTripData: Int = missingTrips
}
} else {
new ObservedStopTimes {
def observedStopsByTrip(tripId: String): Seq[(ScheduledStop, ScheduledStop)] =
Nil
def observedTripById(tripId: String): Trip =
new Trip {
def headsign = None
def direction = None
def id = ""
def schedule = Nil
def tripShape = None
}
def missingTripData: Int = missingTrips
}
}
}
}
| WorldBank-Transport/open-transit-indicators | scala/opentransit/src/main/scala/com/azavea/opentransit/indicators/parameters/ObservedStopTimes.scala | Scala | gpl-3.0 | 4,962 |
package com.obecto.gattakka.genetics.operators
import com.obecto.gattakka.{IndividualDescriptor, PipelineOperator, IndividualState}
trait EliteOperator extends PipelineOperator {
def elitePercentage: Double
def apply(snapshot: List[IndividualDescriptor]): List[IndividualDescriptor] = {
val withoutDoomed = snapshot filter (_.state != IndividualState.DoomedToDie)
val sorted = withoutDoomed sortBy (-_.fitness)
val elites = sorted.slice(0, (snapshot.size * elitePercentage).ceil.toInt)
for (descriptor <- elites) {
println(s"Elite fitness: ${descriptor.fitness}")
descriptor.state = IndividualState.Elite
}
snapshot
}
}
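// Hypothetical usage sketch (illustrative names, not part of the original file): a concrete
// operator only needs to supply the elite percentage.
//
//   object TopTenPercentElite extends EliteOperator {
//     val elitePercentage = 0.1
//   }
//
//   // TopTenPercentElite(snapshot) then marks the fittest 10% (rounded up) as IndividualState.Elite.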
| obecto/gattakka | src/main/scala/com/obecto/gattakka/genetics/operators/EliteOperator.scala | Scala | mit | 667 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.test
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
/**
* An example class to demonstrate UDT in Scala, Java, and Python.
* @param x x coordinate
* @param y y coordinate
*/
@SQLUserDefinedType(udt = classOf[ExamplePointUDT])
private[sql] class ExamplePoint(val x: Double, val y: Double) extends Serializable {
override def hashCode(): Int = 31 * (31 * x.hashCode()) + y.hashCode()
override def equals(other: Any): Boolean = other match {
case that: ExamplePoint => this.x == that.x && this.y == that.y
case _ => false
}
override def toString(): String = s"($x, $y)"
}
/**
* User-defined type for [[ExamplePoint]].
*/
private[sql] class ExamplePointUDT extends UserDefinedType[ExamplePoint] {
override def sqlType: DataType = ArrayType(DoubleType, false)
override def pyUDT: String = "pyspark.sql.tests.ExamplePointUDT"
override def serialize(p: ExamplePoint): GenericArrayData = {
val output = new Array[Any](2)
output(0) = p.x
output(1) = p.y
new GenericArrayData(output)
}
override def deserialize(datum: Any): ExamplePoint = {
datum match {
case values: ArrayData =>
new ExamplePoint(values.getDouble(0), values.getDouble(1))
}
}
override def userClass: Class[ExamplePoint] = classOf[ExamplePoint]
private[spark] override def asNullable: ExamplePointUDT = this
}
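// Illustrative round trip through the UDT defined above (not part of the original file):
//
//   val udt = new ExamplePointUDT
//   val serialized = udt.serialize(new ExamplePoint(1.0, 2.0)) // GenericArrayData of [1.0, 2.0]
//   val restored = udt.deserialize(serialized)                 // ExamplePoint(1.0, 2.0)
//   assert(restored == new ExamplePoint(1.0, 2.0))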
| lxsmnv/spark | sql/core/src/main/scala/org/apache/spark/sql/test/ExamplePointUDT.scala | Scala | apache-2.0 | 2,246 |
package org.smitt.conf
object Conf {
import scalaz.Reader
private val conf = Reader[Conf, Conf](identity)
val messengerConf = conf map (_.messengerConf)
val outputConf = conf map (_.outputConf)
}
trait Conf
extends MessengerConfComponent
with OutputConfComponent
| sergius/smitt | src/main/scala/org/smitt/conf/Conf.scala | Scala | mit | 282 |
package net.leonini.passwordgrids
import java.io.File
import java.lang.System
import com.typesafe.config.ConfigFactory
object CLI {
val version = 1.0
def main(args: Array[String]) {
println(f"# PasswordGrids v$version%2.1f")
val alphabet = {
val f: File = new File(System.getProperty("user.home") + File.separator + ".passwordgrids");
if (f.exists() && !f.isDirectory()) {
val cfg = ConfigFactory.parseFile(f)
if (cfg.hasPath("alphabet"))
cfg.getString("alphabet")
else "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
} else "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
}
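    // Example ~/.passwordgrids contents (HOCON/Typesafe Config syntax; the value is illustrative):
    //
    //   alphabet = "0123456789abcdefghijklmnopqrstuvwxyz"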
val standardIn = System.console()
print("> master password: ")
val salt = standardIn.readPassword() mkString ""
print("> identifier: ")
val identifier = standardIn.readPassword() mkString ""
val d = new GridsDisplay(salt, identifier, alphabet = alphabet)
print(d)
}
}
| lleonini/passwordgrids-cli | src/main/scala/CLI.scala | Scala | mit | 959 |
package com.actian.spark_vector.provider
import org.scalatest._
import org.scalatest.funsuite.FixtureAnyFunSuite
import org.apache.spark.sql.SparkSession
import resource._
import org.apache.spark.SparkConf
import matchers.should._
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
class CaseInsensitivityTest extends FixtureAnyFunSuite with Matchers with PrivateMethodTester with ScalaCheckPropertyChecks
{
override type FixtureParam = SparkSession
override protected def withFixture(test: OneArgTest): Outcome = {
val conf = new SparkConf()
.setMaster("local[1]")
.setAppName("case sensitivity test")
managed(SparkSession.builder.config(conf).getOrCreate()).acquireAndGet { spark =>
withFixture(test.toNoArgTest(spark))
}
}
test("Job options case insensitive")
{
implicit spark =>
val scenarios =
Table(
("input", "result"),
(Map("header"-> "true"), Map("header"-> "true")),
(Map("HEADER"-> "true"), Map("header"-> "true")),
(Map("Header"-> "true"), Map("header"-> "true"))
)
forAll(scenarios) { (inp, res) =>
val jobPart = new JobPart("1", "test", "test op", "path", Some("csv"), Seq(new ColumnInfo("test", new LogicalType("Int", 4, 4), "Int", false, None)), Some(inp), new DataStream("test", "test", Seq(new StreamPerNode(1, 1, "test"))))
Utils.getOptions(jobPart) should be (res)
}
}
test("Job extra options case insensitive")
{
implicit spark =>
val scenarios =
Table(
("input", "result"),
(Map("schema"-> "test", "filter"-> "test"), Map("schema"-> "test", "filter"-> "test")),
(Map("SCHEMA"-> "test", "FILTER"-> "test"), Map("schema"-> "test", "filter"-> "test")),
(Map("Schema"-> "test", "filtER"-> "test"), Map("schema"-> "test", "filter"-> "test"))
)
val getExtraOptions = PrivateMethod[Map[String, String]]('getExtraOptions)
val handler = new RequestHandler(spark , ProviderAuth("test", "test"))
forAll(scenarios) { (inp, res) =>
val jobPart = new JobPart("1", "test", "test op", "path", Some("csv"), Seq(new ColumnInfo("test", new LogicalType("Int", 4, 4), "Int", false, None)), Some(inp), new DataStream("test", "test", Seq(new StreamPerNode(1, 1, "test"))))
handler invokePrivate getExtraOptions(jobPart) should be (res)
}
}
} | ActianCorp/spark-vector | provider/src/test/scala/com/actian/spark_vector/provider/CaseInsensitivityTest.scala | Scala | apache-2.0 | 2,494 |
package com.danielasfregola.twitter4s.http.clients.rest.trends
import akka.http.scaladsl.model.HttpMethods
import com.danielasfregola.twitter4s.entities.{Location, LocationTrends, RatedData}
import com.danielasfregola.twitter4s.helpers.ClientSpec
class TwitterTrendClientSpec extends ClientSpec {
class TwitterTrendClientSpecContext extends RestClientSpecContext with TwitterTrendClient
"Twitter Trend Client" should {
"get global trends" in new TwitterTrendClientSpecContext {
val result: RatedData[Seq[LocationTrends]] = when(globalTrends())
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/1.1/trends/place.json"
request.uri.rawQueryString === Some("id=1")
}
.respondWithRated("/twitter/rest/trends/trends.json")
.await
result.rate_limit === rateLimit
result.data === loadJsonAs[Seq[LocationTrends]]("/fixtures/rest/trends/trends.json")
}
"get trends for a location" in new TwitterTrendClientSpecContext {
val result: RatedData[Seq[LocationTrends]] = when(trends(1))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/1.1/trends/place.json"
request.uri.rawQueryString === Some("id=1")
}
.respondWithRated("/twitter/rest/trends/trends.json")
.await
result.rate_limit === rateLimit
result.data === loadJsonAs[Seq[LocationTrends]]("/fixtures/rest/trends/trends.json")
}
"get trends for a location without hashtags" in new TwitterTrendClientSpecContext {
val result: RatedData[Seq[LocationTrends]] = when(trends(1, true))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/1.1/trends/place.json"
request.uri.rawQueryString === Some("exclude=hashtags&id=1")
}
.respondWithRated("/twitter/rest/trends/trends.json")
.await
result.rate_limit === rateLimit
result.data === loadJsonAs[Seq[LocationTrends]]("/fixtures/rest/trends/trends.json")
}
"get locations with available trends" in new TwitterTrendClientSpecContext {
val result: RatedData[Seq[Location]] = when(locationTrends)
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/1.1/trends/available.json"
}
.respondWithRated("/twitter/rest/trends/available_locations.json")
.await
result.rate_limit === rateLimit
result.data === loadJsonAs[Seq[Location]]("/fixtures/rest/trends/available_locations.json")
}
"get closest location trends" in new TwitterTrendClientSpecContext {
val result: RatedData[Seq[Location]] = when(closestLocationTrends(37.781157, -122.400612831116))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/1.1/trends/closest.json"
}
.respondWithRated("/twitter/rest/trends/closest_locations.json")
.await
result.rate_limit === rateLimit
result.data === loadJsonAs[Seq[Location]]("/fixtures/rest/trends/closest_locations.json")
}
}
}
| DanielaSfregola/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/trends/TwitterTrendClientSpec.scala | Scala | apache-2.0 | 3,348 |
/*
https://www.reddit.com/r/dailyprogrammer/comments/cdieag/20190715_challenge_379_easy_progressive_taxation/
*/
case class Bracket(max: Option[Int], rate: Double)
case class LimitedBracket(min: Int, max: Int, rate: Double)
object Tax {
val Taxes: List[Bracket] = List[Bracket](
Bracket(Some(10_000), 0.0),
Bracket(Some(30_000), 0.10),
Bracket(Some(100_000), 0.25),
Bracket(None, 0.40)
)
def calcTaxes: List[LimitedBracket] = {
var min = 0
Taxes.map(t => {
val max = t.max.getOrElse(Int.MaxValue)
val bracket = LimitedBracket(min, max, t.rate)
min = max + 1
bracket
})
}
def tax(amount: Int): Int = {
var tax = 0
calcTaxes.foreach(bracket => {
var toTax = 0
if (amount > bracket.max)
toTax = bracket.max - bracket.min
else if (amount > bracket.min)
toTax = amount - bracket.min
tax = tax + (toTax * bracket.rate).round.toInt
})
tax
}
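  // Worked example of the per-bracket rounding above, for an income of 56,789:
  //   10,000 at  0% -> 0
  //   19,999 at 10% -> 1,999.9, rounded to 2,000
  //   26,788 at 25% -> 6,697
  // so tax(56_789) == 8_697.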
def main(args: Array[String]): Unit = {
List(0, 10_000, 10_009, 10_010, 12_000, 56_789, 1_234_567)
.foreach(a => println("%s: %s".format(a, tax(a))))
}
}
| frankivo/dailyprogrammer | 379/src/main/scala/Tax.scala | Scala | gpl-3.0 | 1,134 |
package hasheq
import org.scalacheck.{Arbitrary, Properties}
import scala.language.higherKinds
package object immutable {
/** Witness that `S[A]` represents an immutable set of elements of type `A`. */
type SetRepr[S[_], A] = Setoid.Aux[S, A, Equality.type]
object SetRepr {
def properties[S[_], A](name: String = "SetRepr")(implicit S: SetRepr[S, A], EA: Equal[A], A: Arbitrary[A], PA: Arbitrary[A => Boolean]): Properties =
Setoid.properties[S, A, Equality.type](name)
}
type HashSet[A] = HashSetoid[A, Equality.type]
object HashSet {
def empty[A]: HashSet[A] = HashSetoid.empty[A, Equality.type]
def apply[A](elems: A*)(implicit A: Hash[A], E: Equal[A]): HashSet[A] = HashSetoid(elems:_*)
def of[A, Eq](elems: A*)(implicit A: HashEq[A, Eq], E: Equiv[A, Eq]): HashSetoid[A, Eq] = HashSetoid(elems:_*)
def setoidInstance[A](implicit A: Hash[A]): SetRepr[HashSet, A] = HashSetoid.setoidInstance[A, Equality.type]
}
}
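// Usage sketch (not part of the original file; assumes Hash[Int] and Equal[Int] instances are in
// implicit scope):
//
//   val s: HashSet[Int] = HashSet(1, 2, 3)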
| TomasMikula/hasheq | src/main/scala/hasheq/immutable/package.scala | Scala | bsd-3-clause | 963 |
package com.outr.arango.api.model
import io.circe.Json
case class PutAPISimpleWithinRectangle(collection: String,
geo: Option[String] = None,
latitude1: Option[String] = None,
latitude2: Option[String] = None,
limit: Option[String] = None,
longitude1: Option[String] = None,
longitude2: Option[String] = None,
skip: Option[String] = None) | outr/arangodb-scala | api/src/main/scala/com/outr/arango/api/model/PutAPISimpleWithinRectangle.scala | Scala | mit | 614 |
package org.bitcoins.rpc.marshallers.networking
import org.bitcoins.rpc.bitcoincore.networking.{NetworkConnections, NetworkConnectionsImpl}
import spray.json._
/**
* Created by Tom on 1/6/2016.
*/
object NetworkConnectionsMarshaller extends DefaultJsonProtocol {
val nameKey = "name"
val limitedKey = "limited"
val reachableKey = "reachable"
val proxyKey = "proxy"
val proxyRandomizeCredentialsKey = "proxy_randomize_credentials"
implicit object NetworkConnectionsFormatter extends RootJsonFormat[NetworkConnections] {
override def read(value : JsValue) : NetworkConnections = {
val obj = value.asJsObject
val name = obj.fields(nameKey).convertTo[String]
val limited = obj.fields(limitedKey).convertTo[Boolean]
val reachable = obj.fields(reachableKey).convertTo[Boolean]
val proxy = obj.fields(proxyKey).convertTo[String]
val proxyRandomizeCredentials = obj.fields(proxyRandomizeCredentialsKey).convertTo[Boolean]
NetworkConnectionsImpl(name, limited, reachable, proxy, proxyRandomizeCredentials)
}
override def write(detail : NetworkConnections) : JsValue = {
val m : Map[String, JsValue] = Map (
nameKey -> JsString(detail.name),
limitedKey -> JsBoolean(detail.limited),
reachableKey -> JsBoolean(detail.reachable),
proxyKey -> JsString(detail.proxy),
proxyRandomizeCredentialsKey -> JsBoolean(detail.proxyRandomizeCredentials)
)
JsObject(m)
}
}
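  // Example of the JSON shape this formatter reads and writes (field values are illustrative):
  //
  //   {
  //     "name" : "ipv4",
  //     "limited" : false,
  //     "reachable" : true,
  //     "proxy" : "",
  //     "proxy_randomize_credentials" : false
  //   }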
} | bitcoin-s/bitcoin-s-rpc-client | src/main/scala/org/bitcoins/rpc/marshallers/networking/NetworkConnectionsMarshaller.scala | Scala | mit | 1,485 |
package openstackApi.domain
//=============================
//RESULT MESSAGES
sealed trait ResultMessage
case class Success2(message: String) extends ResultMessage
case class Error(message: String) extends ResultMessage
//=============================
// REQUEST MESSAGES
sealed trait RequestMessage
case class Get(id: String) extends RequestMessage
case class CreateVMrq(name: String, flavor: String, image: String, keyname: String) extends RequestMessage | Spirals-Team/ermis | src/main/scala/openstackApi/domain/Messages.scala | Scala | agpl-3.0 | 420 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.data.{GenericRowData, RowData}
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.runtime.operators.values.ValuesInputFormat
import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo
import com.google.common.collect.ImmutableList
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rex.RexLiteral
import scala.collection.JavaConversions._
object ValuesCodeGenerator {
def generatorInputFormat(
config: TableConfig,
rowType: RelDataType,
tuples: ImmutableList[ImmutableList[RexLiteral]],
description: String): ValuesInputFormat = {
val outputType = FlinkTypeFactory.toLogicalRowType(rowType)
val ctx = CodeGeneratorContext(config)
val exprGenerator = new ExprCodeGenerator(ctx, false)
// generate code for every record
val generatedRecords = tuples.map { r =>
exprGenerator.generateResultExpression(
r.map(exprGenerator.generateExpression), outputType, classOf[GenericRowData])
}
// generate input format
val generatedFunction = InputFormatCodeGenerator.generateValuesInputFormat[RowData](
ctx,
description,
generatedRecords.map(_.code),
outputType)
new ValuesInputFormat(generatedFunction, RowDataTypeInfo.of(outputType))
}
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/ValuesCodeGenerator.scala | Scala | apache-2.0 | 2,230 |
package com.geeksville.akka
import akka.actor.Actor
import akka.actor.PoisonPill
import scala.concurrent.duration._
/**
* This is an Actor mixin that runs doNextStep periodically
*/
trait TimesteppedActor extends Actor {
import context._
private case object SimNext
def numPoints: Int
def interval: Double
protected var numRemaining = numPoints
/// What step are we currently on?
def currentStep = numPoints - numRemaining
private def scheduleNext() = context.system.scheduler.scheduleOnce(interval seconds, self, SimNext)
// Start our sim
scheduleNext()
protected def doNextStep(): Unit
abstract override def receive = ({
case SimNext =>
if (numRemaining == 0)
self ! PoisonPill
else {
doNextStep()
numRemaining -= 1
scheduleNext()
}
}: PartialFunction[Any, Unit]).orElse(super.receive)
}
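// Hypothetical mix-in sketch (illustrative names, not part of the original file). The stackable
// `abstract override def receive` above needs a concrete receive earlier in the linearization,
// so the timestepping trait is mixed in after a base actor that provides one:
//
//   abstract class SimActor extends Actor {
//     def receive: Receive = { case _ => } // base behaviour the mixin wraps
//   }
//
//   class CountingActor extends SimActor with TimesteppedActor {
//     def numPoints = 10
//     def interval = 0.5 // seconds between steps
//     protected def doNextStep(): Unit = println(s"step $currentStep")
//   }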
| geeksville/arduleader | common/src/main/scala/com/geeksville/akka/TimesteppedActor.scala | Scala | gpl-3.0 | 884 |
package io.hydrosphere.mist.master
import java.io.File
import cats.Eval
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory, ConfigValueType}
import io.hydrosphere.mist.utils.ConfigUtils._
import io.hydrosphere.mist.master.data.ConfigRepr
import io.hydrosphere.mist.master.models.ContextConfig
import cats._
import cats.data.Reader
import cats.syntax._
import cats.implicits._
import io.hydrosphere.mist.utils.{Logger, NetUtils}
import scala.collection.JavaConverters._
import scala.concurrent.duration._
case class AsyncInterfaceConfig(
host: String,
port: Int,
publishTopic: String,
subscribeTopic: String
)
case class HostPortConfig(
host: String,
port: Int,
publicHost: String
)
object HostPortConfig {
def apply(config: Config): HostPortConfig =
HostPortConfig(config.getString("host"), config.getInt("port"), config.getString("public-host"))
}
case class HttpConfig(
host: String,
port: Int,
uiPath: String,
keepAliveTick: FiniteDuration,
publicHost: String
)
object HttpConfig {
def apply(config: Config): HttpConfig =
HttpConfig(
config.getString("host"),
config.getInt("port"),
config.getString("ui"),
config.getFiniteDuration("ws-keepalive-tick"),
config.getString("public-host")
)
}
case class LogServiceConfig(
host: String,
port: Int,
dumpDirectory: String,
publicHost: String
)
object LogServiceConfig {
def apply(config: Config): LogServiceConfig = {
LogServiceConfig(
host = config.getString("host"),
port = config.getInt("port"),
dumpDirectory = config.getString("dump_directory"),
publicHost = config.getString("public-host")
)
}
}
object AsyncInterfaceConfig {
def apply(config: Config): AsyncInterfaceConfig = {
AsyncInterfaceConfig(
host = config.getString("host"),
port = config.getInt("port"),
publishTopic = config.getString("publish-topic"),
subscribeTopic = config.getString("subscribe-topic")
)
}
def ifEnabled(config: Config): Option[AsyncInterfaceConfig] = {
if (config.getBoolean("on"))
AsyncInterfaceConfig(config).some
else
None
}
}
case class WorkersSettingsConfig(
runner: String,
runnerInitTimeout: Duration,
readyTimeout: FiniteDuration,
maxArtifactSize: Long,
dockerConfig: DockerRunnerConfig,
manualConfig: ManualRunnerConfig
)
sealed trait DockerNetworkConfiguration
case class NamedNetwork(name: String) extends DockerNetworkConfiguration
case class AutoMasterNetwork(masterId: String) extends DockerNetworkConfiguration
object DockerNetworkConfiguration {
def apply(config: Config): DockerNetworkConfiguration = {
config.getString("network-type") match {
case "auto-master" => AutoMasterNetwork(config.getConfig("auto-master-network").getString("container-id"))
case name => NamedNetwork(name)
}
}
}
case class DockerRunnerConfig(
dockerHost: String,
image: String,
network: DockerNetworkConfiguration,
mistHome: String,
sparkHome: String
)
object DockerRunnerConfig {
def apply(config: Config): DockerRunnerConfig = {
DockerRunnerConfig(
dockerHost = config.getString("host"),
image = config.getString("image"),
network = DockerNetworkConfiguration(config),
mistHome = config.getString("mist-home"),
sparkHome = config.getString("spark-home")
)
}
}
case class ManualRunnerConfig(
cmdStart: String,
cmdStop: Option[String],
async: Boolean
)
object ManualRunnerConfig {
def apply(config: Config): ManualRunnerConfig = {
def readOld(): ManualRunnerConfig = {
val stop = config.getString("cmdStop")
ManualRunnerConfig(
cmdStart = config.getString("cmd"),
cmdStop = if (stop.isEmpty) None else stop.some,
async = true
)
}
def readNew(): ManualRunnerConfig = {
val entry = config.getConfig("manual")
ManualRunnerConfig(
cmdStart = entry.getString("startCmd"),
cmdStop = entry.getOptString("stopCmd"),
async = entry.getBoolean("async")
)
}
if (config.getString("cmd").nonEmpty) readOld() else readNew()
}
}
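// Illustrative HOCON accepted by ManualRunnerConfig.apply above (per parseOnly below these keys
// sit under mist.workers; the command values are examples only).
//
// Old layout, used when "cmd" is non-empty:
//
//   cmd = "bin/start-worker.sh"
//   cmdStop = ""                     # empty string means "no stop command"
//
// New layout, used otherwise:
//
//   manual {
//     startCmd = "bin/start-worker.sh"
//     stopCmd = "bin/stop-worker.sh" # optional
//     async = true
//   }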
object WorkersSettingsConfig {
def apply(config: Config): WorkersSettingsConfig = {
WorkersSettingsConfig(
runner = config.getString("runner"),
runnerInitTimeout = Duration(config.getString("runner-init-timeout")),
readyTimeout = Duration(config.getString("ready-timeout")) match {
case f: FiniteDuration => f
        case _ => throw new IllegalArgumentException("Worker ready-timeout should be finite")
},
maxArtifactSize = config.getBytes("max-artifact-size"),
dockerConfig = DockerRunnerConfig(config.getConfig("docker")),
manualConfig = ManualRunnerConfig(config)
)
}
}
/**
* Context settings that are preconfigured in main config
*/
case class ContextsSettings(
default: ContextConfig,
contexts: Map[String, ContextConfig]
)
object ContextsSettings {
val Default = "default"
def apply(config: Config): ContextsSettings = {
val defaultCfg = config.getConfig("context-defaults")
val default = ConfigRepr.ContextConfigRepr.fromConfig(Default, defaultCfg)
val contextsCfg = config.getConfig("context")
val contexts = contextsCfg.root().entrySet().asScala.filter(entry => {
entry.getValue.valueType() == ConfigValueType.OBJECT
}).map(entry => {
val name = entry.getKey
val cfg = contextsCfg.getConfig(name).withFallback(defaultCfg)
name -> ConfigRepr.ContextConfigRepr.fromConfig(name, cfg)
}).toMap
ContextsSettings(default, contexts)
}
}
case class FunctionInfoProviderConfig(
runTimeout: FiniteDuration,
cacheEntryTtl: FiniteDuration,
sparkConf: Map[String, String]
)
object FunctionInfoProviderConfig {
import scala.collection.JavaConverters._
def apply(c: Config): FunctionInfoProviderConfig = {
FunctionInfoProviderConfig(
c.getFiniteDuration("init-timeout"),
c.getFiniteDuration("cache-entry-ttl"),
c.getConfig("spark-conf").entrySet().asScala
.map(entry => entry.getKey -> entry.getValue.unwrapped().toString)
.toMap
)
}
}
case class SecurityConfig(
keytab: String,
principal: String,
interval: FiniteDuration
)
object SecurityConfig {
def apply(c: Config): SecurityConfig = {
SecurityConfig(
keytab = c.getString("keytab"),
principal = c.getString("principal"),
interval = c.getFiniteDuration("interval")
)
}
def ifEnabled(c: Config): Option[SecurityConfig] = {
if (c.getBoolean("enabled")) SecurityConfig(c).some else None
}
}
sealed trait DbConfig
object DbConfig {
final case class H2OldConfig(filePath: String) extends DbConfig
final case class JDBCDbConfig(
poolSize: Int,
driverClass: String,
jdbcUrl: String,
username: Option[String],
password: Option[String],
migration: Boolean
) extends DbConfig
def apply(c: Config): DbConfig = {
c.getOptString("filepath") match {
case Some(path) => H2OldConfig(path)
case None =>
JDBCDbConfig(
c.getInt("poolSize"),
c.getString("driverClass"),
c.getString("jdbcUrl"),
c.getOptString("username"),
c.getOptString("password"),
c.getBoolean("migration")
)
}
}
}
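// Illustrative HOCON for the two DbConfig variants above (read from mist.db in parseOnly below;
// all values are examples only). Presence of "filepath" selects H2OldConfig, otherwise the JDBC
// settings are read:
//
//   db { filepath = "/var/mist/recovery.db" }
//
//   db {
//     poolSize = 10
//     driverClass = "org.h2.Driver"
//     jdbcUrl = "jdbc:h2:file:/var/mist/mist_data"
//     migration = true
//   }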
case class MasterConfig(
cluster: HostPortConfig,
http: HttpConfig,
mqtt: Option[AsyncInterfaceConfig],
kafka: Option[AsyncInterfaceConfig],
logs: LogServiceConfig,
workers: WorkersSettingsConfig,
contextsSettings: ContextsSettings,
dbConfig: DbConfig,
contextsPath: String,
functionsPath: String,
security: Option[SecurityConfig],
jobInfoProviderConfig: FunctionInfoProviderConfig,
srcConfigPath: String,
jobsSavePath: String,
artifactRepositoryPath: String,
raw: Config
)
object MasterConfig extends Logger {
def load(filePath: String): MasterConfig = {
val cfg = loadConfig(filePath)
parse(filePath, cfg)
}
def loadConfig(filePath: String): Config = {
val user = ConfigFactory.parseFile(new File(filePath))
resolveUserConf(user)
}
def resolveUserConf(config: Config): Config = {
val appConfig = ConfigFactory.parseResourcesAnySyntax("master.conf")
val properties = ConfigFactory.systemProperties()
properties.withFallback(config.withFallback(appConfig)).resolve()
}
def parse(filePath: String, config: Config): MasterConfig = autoConfigure(parseOnly(filePath, config))
def parseOnly(filePath: String, config: Config): MasterConfig = {
val mist = config.getConfig("mist")
MasterConfig(
cluster = HostPortConfig(mist.getConfig("cluster")),
http = HttpConfig(mist.getConfig("http")),
mqtt = AsyncInterfaceConfig.ifEnabled(mist.getConfig("mqtt")),
kafka = AsyncInterfaceConfig.ifEnabled(mist.getConfig("kafka")),
logs = LogServiceConfig(mist.getConfig("log-service")),
workers = WorkersSettingsConfig(mist.getConfig("workers")),
contextsSettings = ContextsSettings(mist),
dbConfig = DbConfig(mist.getConfig("db")),
contextsPath = mist.getString("contexts-store.path"),
functionsPath = mist.getString("functions-store.path"),
jobsSavePath = mist.getString("jobs-resolver.save-path"),
artifactRepositoryPath = mist.getString("artifact-repository.save-path"),
security = SecurityConfig.ifEnabled(mist.getConfig("security")),
jobInfoProviderConfig = FunctionInfoProviderConfig(mist.getConfig("job-extractor")),
srcConfigPath = filePath,
raw = config
)
}
def autoConfigure(masterConfig: MasterConfig): MasterConfig =
autoConfigure(masterConfig, Eval.later(NetUtils.findLocalInetAddress().getHostAddress))
def autoConfigure(masterConfig: MasterConfig, host: Eval[String]): MasterConfig = {
import shadedshapeless._
type HostLens = Lens[MasterConfig, String]
def updateAuto(s: String, name: String): String = {
if (s == "auto") {
logger.info(s"Automatically update $name to ${host.value}")
host.value
} else s
}
def modify(lens: HostLens, name: String)(c: MasterConfig): MasterConfig = {
lens.modify(c)(s => updateAuto(s, name))
}
val optic = lens[MasterConfig]
val fieldsUpd = Seq[(HostLens, String)](
optic.http.host -> "http.host",
optic.http.publicHost -> "http.publicHost",
optic.cluster.host -> "cluster.host",
optic.cluster.publicHost -> "cluster.publicHost",
optic.logs.host -> "logs.host",
optic.logs.publicHost -> "logs.publicHost"
).map({case (l, n) => modify(l, n)(_)})
val akkaUpd = (c: MasterConfig) => optic.raw.modify(c)(r => {
r.withValue("akka.remote.netty.tcp.hostname", ConfigValueFactory.fromAnyRef(c.cluster.publicHost))
.withValue("akka.remote.netty.tcp.bind-hostname", ConfigValueFactory.fromAnyRef(c.cluster.host))
})
val all = (fieldsUpd :+ akkaUpd).reduceLeft(_ >>> _)
all(masterConfig)
}
}
| Hydrospheredata/mist | mist/master/src/main/scala/io/hydrosphere/mist/master/configs.scala | Scala | apache-2.0 | 11,017 |
package org.functionalkoans.forscala
import org.functionalkoans.forscala.support.KoanFunSuite
import org.scalatest.Matchers
class AboutForExpressions extends KoanFunSuite with Matchers {
koan("For loops can be simple") {
val someNumbers = Range(0, 10)
var sum = 0
for (i <- someNumbers)
sum += i
sum should equal(__)
}
koan("For loops can contain additional logic") {
val someNumbers = Range(0, 10)
var sum = 0
// sum only the even numbers
for (i <- someNumbers)
if (i % 2 == 0) sum += i
sum should equal(__)
}
koan("For expressions can nest, with later generators varying more rapidly than earlier ones") {
val xValues = Range(1, 5)
val yValues = Range(1, 3)
val coordinates = for {
x <- xValues
y <- yValues} yield (x, y)
coordinates(4) should be(__, __)
}
}
| pharmpress/codingdojo | scala-koans/src/test/scala/org/functionalkoans/forscala/AboutForExpressions.scala | Scala | apache-2.0 | 858 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import scala.collection.JavaConversions._
import org.I0Itec.zkclient._
import joptsimple._
import java.util.Properties
import java.util.Random
import java.io.PrintStream
import kafka.message._
import kafka.serializer._
import kafka.utils._
import kafka.metrics.KafkaMetricsReporter
/**
* Consumer that dumps messages out to standard out.
*
*/
object ConsoleConsumer extends Logging {
def main(args: Array[String]) {
val parser = new OptionParser
val topicIdOpt = parser.accepts("topic", "The topic id to consume on.")
.withRequiredArg
.describedAs("topic")
.ofType(classOf[String])
val whitelistOpt = parser.accepts("whitelist", "Whitelist of topics to include for consumption.")
.withRequiredArg
.describedAs("whitelist")
.ofType(classOf[String])
val blacklistOpt = parser.accepts("blacklist", "Blacklist of topics to exclude from consumption.")
.withRequiredArg
.describedAs("blacklist")
.ofType(classOf[String])
val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the zookeeper connection in the form host:port. " +
"Multiple URLS can be given to allow fail-over.")
.withRequiredArg
.describedAs("urls")
.ofType(classOf[String])
val groupIdOpt = parser.accepts("group", "The group id to consume on.")
.withRequiredArg
.describedAs("gid")
.defaultsTo("console-consumer-" + new Random().nextInt(100000))
.ofType(classOf[String])
val fetchSizeOpt = parser.accepts("fetch-size", "The amount of data to fetch in a single request.")
.withRequiredArg
.describedAs("size")
.ofType(classOf[java.lang.Integer])
.defaultsTo(1024 * 1024)
val minFetchBytesOpt = parser.accepts("min-fetch-bytes", "The min number of bytes each fetch request waits for.")
.withRequiredArg
.describedAs("bytes")
.ofType(classOf[java.lang.Integer])
.defaultsTo(1)
val maxWaitMsOpt = parser.accepts("max-wait-ms", "The max amount of time each fetch request waits.")
.withRequiredArg
.describedAs("ms")
.ofType(classOf[java.lang.Integer])
.defaultsTo(100)
val socketBufferSizeOpt = parser.accepts("socket-buffer-size", "The size of the tcp RECV size.")
.withRequiredArg
.describedAs("size")
.ofType(classOf[java.lang.Integer])
.defaultsTo(2 * 1024 * 1024)
val socketTimeoutMsOpt = parser.accepts("socket-timeout-ms", "The socket timeout used for the connection to the broker")
.withRequiredArg
.describedAs("ms")
.ofType(classOf[java.lang.Integer])
.defaultsTo(ConsumerConfig.SocketTimeout)
val refreshMetadataBackoffMsOpt = parser.accepts("refresh-leader-backoff-ms", "Backoff time before refreshing metadata")
.withRequiredArg
.describedAs("ms")
.ofType(classOf[java.lang.Integer])
.defaultsTo(ConsumerConfig.RefreshMetadataBackoffMs)
    val consumerTimeoutMsOpt = parser.accepts("consumer-timeout-ms", "consumer throws a timeout exception after waiting this much " +
                           "time without incoming messages")
.withRequiredArg
.describedAs("prop")
.ofType(classOf[java.lang.Integer])
.defaultsTo(-1)
val messageFormatterOpt = parser.accepts("formatter", "The name of a class to use for formatting kafka messages for display.")
.withRequiredArg
.describedAs("class")
.ofType(classOf[String])
.defaultsTo(classOf[DefaultMessageFormatter].getName)
val messageFormatterArgOpt = parser.accepts("property")
.withRequiredArg
.describedAs("prop")
.ofType(classOf[String])
val resetBeginningOpt = parser.accepts("from-beginning", "If the consumer does not already have an established offset to consume from, " +
"start with the earliest message present in the log rather than the latest message.")
val autoCommitIntervalOpt = parser.accepts("autocommit.interval.ms", "The time interval at which to save the current offset in ms")
.withRequiredArg
.describedAs("ms")
.ofType(classOf[java.lang.Integer])
.defaultsTo(ConsumerConfig.AutoCommitInterval)
val maxMessagesOpt = parser.accepts("max-messages", "The maximum number of messages to consume before exiting. If not set, consumption is continual.")
.withRequiredArg
.describedAs("num_messages")
.ofType(classOf[java.lang.Integer])
val skipMessageOnErrorOpt = parser.accepts("skip-message-on-error", "If there is an error when processing a message, " +
"skip it instead of halt.")
val csvMetricsReporterEnabledOpt = parser.accepts("csv-reporter-enabled", "If set, the CSV metrics reporter will be enabled")
    val metricsDirectoryOpt = parser.accepts("metrics-dir", "If csv-reporter-enabled is set, and this parameter is " +
      "set, the csv metrics will be output here")
.withRequiredArg
.describedAs("metrics dictory")
.ofType(classOf[java.lang.String])
val options: OptionSet = tryParse(parser, args)
CommandLineUtils.checkRequiredArgs(parser, options, zkConnectOpt)
val topicOrFilterOpt = List(topicIdOpt, whitelistOpt, blacklistOpt).filter(options.has)
if (topicOrFilterOpt.size != 1) {
error("Exactly one of whitelist/blacklist/topic is required.")
parser.printHelpOn(System.err)
System.exit(1)
}
val topicArg = options.valueOf(topicOrFilterOpt.head)
val filterSpec = if (options.has(blacklistOpt))
new Blacklist(topicArg)
else
new Whitelist(topicArg)
val csvMetricsReporterEnabled = options.has(csvMetricsReporterEnabledOpt)
if (csvMetricsReporterEnabled) {
val csvReporterProps = new Properties()
csvReporterProps.put("kafka.metrics.polling.interval.secs", "5")
csvReporterProps.put("kafka.metrics.reporters", "kafka.metrics.KafkaCSVMetricsReporter")
if (options.has(metricsDirectoryOpt))
csvReporterProps.put("kafka.csv.metrics.dir", options.valueOf(metricsDirectoryOpt))
else
csvReporterProps.put("kafka.csv.metrics.dir", "kafka_metrics")
csvReporterProps.put("kafka.csv.metrics.reporter.enabled", "true")
val verifiableProps = new VerifiableProperties(csvReporterProps)
KafkaMetricsReporter.startReporters(verifiableProps)
}
val props = new Properties()
props.put("group.id", options.valueOf(groupIdOpt))
props.put("socket.receive.buffer.bytes", options.valueOf(socketBufferSizeOpt).toString)
props.put("socket.timeout.ms", options.valueOf(socketTimeoutMsOpt).toString)
props.put("fetch.message.max.bytes", options.valueOf(fetchSizeOpt).toString)
props.put("fetch.min.bytes", options.valueOf(minFetchBytesOpt).toString)
props.put("fetch.wait.max.ms", options.valueOf(maxWaitMsOpt).toString)
props.put("auto.commit.enable", "true")
props.put("auto.commit.interval.ms", options.valueOf(autoCommitIntervalOpt).toString)
props.put("auto.offset.reset", if(options.has(resetBeginningOpt)) "smallest" else "largest")
props.put("zookeeper.connect", options.valueOf(zkConnectOpt))
props.put("consumer.timeout.ms", options.valueOf(consumerTimeoutMsOpt).toString)
props.put("refresh.leader.backoff.ms", options.valueOf(refreshMetadataBackoffMsOpt).toString)
val config = new ConsumerConfig(props)
val skipMessageOnError = if (options.has(skipMessageOnErrorOpt)) true else false
val messageFormatterClass = Class.forName(options.valueOf(messageFormatterOpt))
val formatterArgs = MessageFormatter.tryParseFormatterArgs(options.valuesOf(messageFormatterArgOpt))
val maxMessages = if(options.has(maxMessagesOpt)) options.valueOf(maxMessagesOpt).intValue else -1
val connector = Consumer.create(config)
if(options.has(resetBeginningOpt))
ZkUtils.maybeDeletePath(options.valueOf(zkConnectOpt), "/consumers/" + options.valueOf(groupIdOpt))
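    // Make sure the connector is shut down (and throw-away group state removed) when the JVM exits.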
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
connector.shutdown()
// if there is no group specified then avoid polluting zookeeper with persistent group data, this is a hack
if(!options.has(groupIdOpt))
ZkUtils.maybeDeletePath(options.valueOf(zkConnectOpt), "/consumers/" + options.valueOf(groupIdOpt))
}
})
var numMessages = 0L
val formatter: MessageFormatter = messageFormatterClass.newInstance().asInstanceOf[MessageFormatter]
formatter.init(formatterArgs)
try {
val stream = connector.createMessageStreamsByFilter(filterSpec, 1, new DefaultDecoder(), new DefaultDecoder()).get(0)
val iter = if(maxMessages >= 0)
stream.slice(0, maxMessages)
else
stream
for(messageAndTopic <- iter) {
try {
formatter.writeTo(messageAndTopic.key, messageAndTopic.message, System.out)
numMessages += 1
} catch {
case e: Throwable =>
if (skipMessageOnError)
error("Error processing message, skipping this message: ", e)
else
throw e
}
if(System.out.checkError()) {
// This means no one is listening to our output stream any more, time to shutdown
System.err.println("Unable to write to standard out, closing consumer.")
System.err.println("Consumed %d messages".format(numMessages))
formatter.close()
connector.shutdown()
System.exit(1)
}
}
} catch {
case e: Throwable => error("Error processing message, stopping consumer: ", e)
}
System.err.println("Consumed %d messages".format(numMessages))
System.out.flush()
formatter.close()
connector.shutdown()
}
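  // Parses the command-line options, printing the error message and aborting the process if parsing fails.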
def tryParse(parser: OptionParser, args: Array[String]) = {
try {
parser.parse(args : _*)
} catch {
case e: OptionException => {
Utils.croak(e.getMessage)
null
}
}
}
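  // Best-effort cleanup of the consumer group's temporary ZooKeeper data; any failure is swallowed.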
def tryCleanupZookeeper(zkUrl: String, groupId: String) {
try {
val dir = "/consumers/" + groupId
info("Cleaning up temporary zookeeper data under " + dir + ".")
val zk = new ZkClient(zkUrl, 30*1000, 30*1000, ZKStringSerializer)
zk.deleteRecursive(dir)
zk.close()
} catch {
case _: Throwable => // swallow
}
}
}
object MessageFormatter {
def tryParseFormatterArgs(args: Iterable[String]): Properties = {
val splits = args.map(_ split "=").filterNot(_ == null).filterNot(_.length == 0)
if(!splits.forall(_.length == 2)) {
System.err.println("Invalid parser arguments: " + args.mkString(" "))
System.exit(1)
}
val props = new Properties
for(a <- splits)
props.put(a(0), a(1))
props
}
}
trait MessageFormatter {
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream)
def init(props: Properties) {}
def close() {}
}
class DefaultMessageFormatter extends MessageFormatter {
var printKey = false
  var keySeparator = "\t".getBytes
  var lineSeparator = "\n".getBytes
override def init(props: Properties) {
if(props.containsKey("print.key"))
printKey = props.getProperty("print.key").trim.toLowerCase.equals("true")
if(props.containsKey("key.separator"))
keySeparator = props.getProperty("key.separator").getBytes
if(props.containsKey("line.separator"))
lineSeparator = props.getProperty("line.separator").getBytes
}
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream) {
if(printKey) {
output.write(if (key == null) "null".getBytes() else key)
output.write(keySeparator)
}
output.write(if (value == null) "null".getBytes() else value)
output.write(lineSeparator)
}
}
class NoOpMessageFormatter extends MessageFormatter {
override def init(props: Properties) {}
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream) {}
}
class ChecksumMessageFormatter extends MessageFormatter {
private var topicStr: String = _
override def init(props: Properties) {
topicStr = props.getProperty("topic")
if (topicStr != null)
topicStr = topicStr + ":"
else
topicStr = ""
}
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream) {
val chksum = new Message(value, key).checksum
output.println(topicStr + "checksum:" + chksum)
}
}
| unix1986/universe | tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/consumer/ConsoleConsumer.scala | Scala | bsd-2-clause | 13,508 |
package org.backuity.matchete
import org.backuity.matchete.TestUtil.Person
import org.junit.Test
class DerivedDiffableTest extends JunitMatchers {
@Test
def eitherShouldBeDiffable(): Unit = {
val either: Either[String, Person] = Right(Person("john", 12))
{
either must_== Right(Person("john", 13))
} must throwAn[AssertionError].withMessage(
"""Right(Person(john,12)) is not equal to Right(Person(john,13))
|Got : value.age = 12
|Expected: value.age = 13""".stripMargin
)
}
@Test
def sealedHierarchiesShouldBeDiffable(): Unit = {
val stuff: Stuff = Flower("rosa", 12)
stuff must_== Flower("rosa", 12)
{
stuff must_== Flower("rosa", 22)
} must throwAn[AssertionError].withMessage(
"""Flower(rosa,12) is not equal to Flower(rosa,22)
|Got : price = 12
|Expected: price = 22""".stripMargin
)
{
stuff must_== Bike("bmx", 12, "xyz")
} must throwAn[AssertionError].withMessage(
"""Flower(rosa,12) is not equal to Bike(bmx,12,xyz)
|Got : Flower(rosa,12)
|Expected: Bike(bmx,12,xyz)""".stripMargin
)
}
}
| backuity/matchete | core/src/test/scala/org/backuity/matchete/DerivedDiffableTest.scala | Scala | apache-2.0 | 1,159 |
package lila.analyse
import org.joda.time.DateTime
import play.api.libs.json.Json
import play.modules.reactivemongo.json.ImplicitBSONHandlers.JsObjectWriter
import lila.db.api._
import lila.db.Implicits._
import lila.game.Game
import tube.analysisTube
object AnalysisRepo {
type ID = String
def done(id: ID, a: Analysis, serverIp: String) = $update(
$select(id),
$set(Json.obj(
"done" -> true,
"data" -> Info.encodeList(a.infos),
"ip" -> serverIp
))
)
def progress(id: ID, userId: ID, startPly: Int) = $update(
$select(id),
$set(
Json.obj(
"uid" -> userId,
"done" -> false,
"date" -> $date(DateTime.now)
) ++ (startPly == 0).fold(Json.obj(), Json.obj("ply" -> startPly))
) ++ $unset("old", "data"),
upsert = true)
def byId(id: ID): Fu[Option[Analysis]] = $find byId id
def doneById(id: ID): Fu[Option[Analysis]] =
$find.one($select(id) ++ Json.obj("done" -> true))
def notDoneById(id: ID): Fu[Option[Analysis]] =
$find.one($select(id) ++ Json.obj("done" -> false))
def doneByIds(ids: Seq[ID]): Fu[Seq[Option[Analysis]]] =
$find optionsByOrderedIds ids map2 { (a: Option[Analysis]) =>
a.filter(_.done)
}
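  // Pairs each game with its completed analysis, dropping games for which none exists.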
def associateToGames(games: List[Game]): Fu[List[(Game, Analysis)]] =
doneByIds(games.map(_.id)) map { as =>
games zip as collect {
case (game, Some(analysis)) => game -> analysis
}
}
def doneByIdNotOld(id: ID): Fu[Option[Analysis]] =
$find.one($select(id) ++ Json.obj("done" -> true, "old" -> $exists(false)))
def isDone(id: ID): Fu[Boolean] =
$count.exists($select(id) ++ Json.obj("done" -> true))
def recent(nb: Int): Fu[List[Analysis]] =
$find($query(Json.obj("done" -> true)) sort $sort.desc("date"), nb)
def skipping(skip: Int, nb: Int): Fu[List[Analysis]] =
$find($query(Json.obj("done" -> true)) skip skip, nb)
def count = $count($select.all)
def remove(id: String) = $remove byId id
}
| JimmyMow/lila | modules/analyse/src/main/AnalysisRepo.scala | Scala | mit | 1,992 |
package yandex.metrika
/**
* --- YANDEX Global Information ---
*/
object Helpers {
/* urls for YANDEX METRIKA api */
val HOST = "https://api-metrika.yandex.ru/"
val _COUNTERS = "counters"
val _COUNTER = "counter/%d"
val _GOALS = _COUNTER + "/goals"
val _GOAL = _COUNTER + "/goal/%d"
val _FILTERS = _COUNTER + "/filters"
val _FILTER = _COUNTER + "/filter/%d"
val _OPERATIONS = _COUNTER + "/operations"
val _OPERATION = _COUNTER + "/operation/%d"
val _GRANTS = _COUNTER + "/grants"
val _GRANT = _COUNTER + "/grant/%s"
val _DELEGATES = "delegates"
val _DELEGATE = "delegate/%s"
val _ACCOUNTS = "accounts"
val _ACCOUNT = "account/%s"
val _STAT = "stat"
val _STAT_TRAFFIC = _STAT + "/traffic"
val _STAT_TRAFFIC_SUMMARY = _STAT_TRAFFIC + "/summary"
val _STAT_TRAFFIC_DEEPNESS = _STAT_TRAFFIC + "/deepness"
val _STAT_TRAFFIC_HOURLY = _STAT_TRAFFIC + "/hourly"
val _STAT_TRAFFIC_LOAD = _STAT_TRAFFIC + "/load"
val _STAT_SOURCES = _STAT + "/sources"
val _STAT_SOURCES_SUMMARY = _STAT_SOURCES + "/summary"
val _STAT_SOURCES_SITES = _STAT_SOURCES + "/sites"
val _STAT_SOURCES_SEARCH_ENGINES = _STAT_SOURCES + "/search_engines"
val _STAT_SOURCES_PHRASES = _STAT_SOURCES + "/phrases"
val _STAT_SOURCES_MARKETING = _STAT_SOURCES + "/marketing"
val _STAT_SOURCES_DIRECT = _STAT_SOURCES + "/direct"
val _STAT_SOURCES_DIRECT_SUMMARY = _STAT_SOURCES_DIRECT + "/summary"
val _STAT_SOURCES_DIRECT_PLATFORMS = _STAT_SOURCES_DIRECT + "/platforms"
val _STAT_SOURCES_DIRECT_REGIONS = _STAT_SOURCES_DIRECT + "/regions"
val _STAT_SOURCES_TAGS = _STAT_SOURCES + "/tags"
val _STAT_CONTENT = _STAT + "/content"
val _STAT_CONTENT_POPULAR = _STAT_CONTENT + "/popular"
val _STAT_CONTENT_ENTRANCE = _STAT_CONTENT + "/entrance"
val _STAT_CONTENT_EXIT = _STAT_CONTENT + "/exit"
val _STAT_CONTENT_TITLES = _STAT_CONTENT + "/titles"
val _STAT_CONTENT_URL_PARAM = _STAT_CONTENT + "/url_param"
  val _STAT_CONTENT_USER_VARS = _STAT_CONTENT + "/user_vars"
val _STAT_CONTENT_ECOMMERCE = _STAT_CONTENT + "/ecommerce"
val _STAT_GEO = _STAT + "/geo"
val _STAT_DEMOGRAPHY = _STAT + "/demography"
val _STAT_DEMOGRAPHY_AGE_GENDER = _STAT_DEMOGRAPHY + "/age_gender"
val _STAT_DEMOGRAPHY_STRUCTURE = _STAT_DEMOGRAPHY + "/structure"
val _STAT_TECH = _STAT + "/tech"
val _STAT_TECH_BROWSERS = _STAT_TECH + "/browsers"
val _STAT_TECH_OS = _STAT_TECH + "/os"
val _STAT_TECH_DISPLAY = _STAT_TECH + "/display"
val _STAT_TECH_MOBILE = _STAT_TECH + "/mobile"
val _STAT_TECH_FLASH = _STAT_TECH + "/flash"
val _STAT_TECH_SILVERLIGHT = _STAT_TECH + "/silverlight"
val _STAT_TECH_DOTNET = _STAT_TECH + "/dotnet"
val _STAT_TECH_JAVA = _STAT_TECH + "/java"
val _STAT_TECH_COOKIES = _STAT_TECH + "/cookies"
val _STAT_TECH_JAVASCRIPT = _STAT_TECH + "/javascript"
}
case class OParameters(
//counters
var `type`: Option[String] = None,
var permission: Option[String] = None,
var ulogin: Option[String] = None,
var field: Option[String] = None,
//stats
var id: Option[Long] = None,
var mirror_id: Option[String] = None,
var goal_id: Option[Long] = None,
var se_id: Option[Long] = None,
  var date1: Option[String] = None, //YYYYMMDD
  var date2: Option[String] = None, //YYYYMMDD
var table_mode: Option[String] = None,
var group: Option[String] = None,
var per_page: Option[Long] = None,
var sort: Option[String] = None,
var reverse: Option[Byte] = None) {
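  // Converts whichever parameters are set into query-string pairs, dropping the unset ones.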
def toSeq: Seq[(String, String)] = {
Seq(
//counters
("type" -> `type`.getOrElse("")),
("permission" -> permission.getOrElse("")),
("ulogin" -> ulogin.getOrElse("")),
("field" -> field.getOrElse("")),
//stats
("id" -> id.map(_.toString).getOrElse("")),
("mirror_id" -> mirror_id.getOrElse("")),
("goal_id" -> goal_id.map(_.toString).getOrElse("")),
("se_id" -> se_id.map(_.toString).getOrElse("")),
("date1" -> date1.getOrElse("")),
("date2" -> date2.getOrElse("")),
("table_mode" -> table_mode.getOrElse("")),
("group" -> group.getOrElse("")),
("per_page" -> per_page.map(_.toString).getOrElse("")),
("sort" -> sort.getOrElse("")),
("reverse" -> reverse.map(_.toString).getOrElse("")))
.filter(_._2 != "")
}
} | krispo/yandex-metrika | src/main/scala/yandex/metrika/Helpers.scala | Scala | mit | 4,268 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation._
import leon.lang._
object PatternMatching9 {
case class Wrapper(var x: Int)
def _main(): Int = {
var c = 1
def get0(): Int = {
c -= 1
0
}
val array = Array(Wrapper(42))
array(get0()) match {
case w if w.x == 42 => w.x = 0
case w => w.x = -1
}
array(0).x + c
} ensuring { _ == 0 }
@extern
def main(args: Array[String]): Unit = _main()
}
| epfl-lara/leon | src/test/resources/regression/genc/valid/PatternMatching9.scala | Scala | gpl-3.0 | 474 |
package org.positronicnet.sample.contacts
import org.positronicnet.ui._
import android.content.{Context, Intent}
import android.util.{AttributeSet, Log}
import android.view.{View, LayoutInflater}
import android.widget.{LinearLayout, ImageView}
import android.net.Uri
object ViewContactUiBinder extends UiBinder {
bind[ CategoryDisplay, AggregatedDatum[_] ](
(( categoryDisplay, datum ) => categoryDisplay.showForDatum( datum )),
(( categoryDisplay, datum ) => datum ) // no update
)
bind[ PhoneWidget, Phone ](
( _.bindItem( _ )),
( (display, datum) => datum))
bind[ EmailWidget, Email ](
( _.bindItem( _ )),
( (display, datum) => datum))
}
class ContactDataAggregateDisplay( ctx: Context, attrs: AttributeSet )
extends LinearLayout( ctx, attrs )
with TypedViewHolder
with WidgetUtils
{
def bind( data: AggregatedData ) =
for (display <- childrenOfType[ DataKindDisplay ])
display.bind( data )
}
class DataKindDisplay( ctx: Context, attrs: AttributeSet )
extends LinearLayout( ctx, attrs )
with TypedViewHolder
with WidgetUtils
{
val inflater =
ctx.getSystemService( Context.LAYOUT_INFLATER_SERVICE )
.asInstanceOf[ LayoutInflater ]
val itemLayoutResId = attrs.getAttributeResourceValue( null, "itemLayout", 0 )
val targetKlassUnk = Class.forName( attrs.getAttributeValue( null, "class" ))
val targetKlass =
targetKlassUnk.asInstanceOf[ Class[T] forSome {type T <: ContactData} ]
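  // Shows this section and inflates one row per datum only when the aggregated data contains entries of the target class.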
def bind( allData: AggregatedData ) = {
val myData = allData.dataOfClass( targetKlass )
if (!myData.isEmpty) {
setVisibility( View.VISIBLE )
for (aggregatedDatum <- myData) {
val view = inflater.inflate( itemLayoutResId, this, false )
addView( view )
// We have an AggregatedDatum which pairs a raw ContactData
// object with the accountInfo of the account it came from.
// Some widgets are associated with string properties of the
// raw datum; others need the AccountInfo to interpret. So,
// we have the binder try it both ways...
ViewContactUiBinder.show( aggregatedDatum, view )
ViewContactUiBinder.show( aggregatedDatum.datum, view )
}
}
}
}
class CategoryDisplay( ctx: Context, attrs: AttributeSet )
extends PositronicTextView( ctx, attrs )
{
def showForDatum( agg: AggregatedDatum[_] ) =
agg.datum match {
case datum: ContactDataWithCategoryLabel =>
agg.acctInfo.dataKinds.get( datum.typeTag ).map { info =>
setText( info.categoryLabelToString( datum.categoryLabel )) }
case _ =>
}
}
// A lot of our widgets, when clicked, start an activity relevant in
// some way to the contents of some ContactData row. This trait abstracts
// the basic pattern...
trait ActivityStarterFor[ Item <: ContactData ]
extends WidgetUtils
with ActivityResultDispatchClient
with PositronicHandlers
{
var item: Item
def activityResultDispatchKey = item.id
def bindItem( item: Item ) = {
Log.d( "XXX", "Bind " + item.toString )
this.item = item
onClick { startActivity( makeIntent ) }
}
def makeIntent: Intent
}
abstract class ActivityStartingGroup[ Item <: ContactData ](
ctx: Context,
attrs: AttributeSet )
extends LinearLayout( ctx, attrs )
with ActivityStarterFor[ Item ]
with UiBindingsForSelfAndChildren
// Details...
class PhoneWidget( ctx: Context, attrs: AttributeSet )
extends ActivityStartingGroup[ Phone ]( ctx, attrs )
{
var item: Phone = null
def makeIntent =
new Intent( Intent.ACTION_CALL, Uri.parse( "tel:" + this.item.number ))
}
class SmsImage( ctx: Context, attrs: AttributeSet )
extends ImageView( ctx, attrs )
with ActivityStarterFor[ Phone ]
{
var item: Phone = null
def makeIntent =
new Intent( Intent.ACTION_VIEW, Uri.parse( "smsto:" + this.item.number ))
}
class EmailWidget( ctx: Context, attrs: AttributeSet )
extends ActivityStartingGroup[ Email ]( ctx, attrs )
{
var item: Email = null
def makeIntent =
new Intent( Intent.ACTION_SENDTO, Uri.parse( "mailto:" + this.item.address))
}
class WebsiteWidget( ctx: Context, attrs: AttributeSet )
extends ActivityStartingGroup[ Website ]( ctx, attrs )
{
var item: Website = null
def makeIntent =
new Intent( Intent.ACTION_VIEW, Uri.parse( this.item.url ))
}
| rst/positronic_net | sample/contacts_app/src/main/scala/ViewWidgets.scala | Scala | bsd-3-clause | 4,339 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import kafka.utils.nonthreadsafe
import kafka.api.ApiUtils._
import kafka.common.TopicAndPartition
import kafka.consumer.ConsumerConfig
import kafka.network.RequestChannel
import kafka.message.MessageSet
import java.util.concurrent.atomic.AtomicInteger
import java.nio.ByteBuffer
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
case class PartitionFetchInfo(offset: Long, fetchSize: Int)
object FetchRequest {
private val random = new Random
val CurrentVersion = 3.shortValue
val DefaultMaxWait = 0
val DefaultMinBytes = 0
val DefaultMaxBytes = Int.MaxValue
val DefaultCorrelationId = 0
def readFrom(buffer: ByteBuffer): FetchRequest = {
val versionId = buffer.getShort
val correlationId = buffer.getInt
val clientId = readShortString(buffer)
val replicaId = buffer.getInt
val maxWait = buffer.getInt
val minBytes = buffer.getInt
val maxBytes = if (versionId < 3) DefaultMaxBytes else buffer.getInt
val topicCount = buffer.getInt
val pairs = (1 to topicCount).flatMap(_ => {
val topic = readShortString(buffer)
val partitionCount = buffer.getInt
(1 to partitionCount).map(_ => {
val partitionId = buffer.getInt
val offset = buffer.getLong
val fetchSize = buffer.getInt
(TopicAndPartition(topic, partitionId), PartitionFetchInfo(offset, fetchSize))
})
})
FetchRequest(versionId, correlationId, clientId, replicaId, maxWait, minBytes, maxBytes, Vector(pairs:_*))
}
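  // Randomizes partition order within each topic, and the topic order itself, so fetching does not always favour the same partitions.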
def shuffle(requestInfo: Seq[(TopicAndPartition, PartitionFetchInfo)]): Seq[(TopicAndPartition, PartitionFetchInfo)] = {
val groupedByTopic = requestInfo.groupBy { case (tp, _) => tp.topic }.map { case (topic, values) =>
topic -> random.shuffle(values)
}
random.shuffle(groupedByTopic.toSeq).flatMap { case (_, partitions) =>
partitions.map { case (tp, fetchInfo) => tp -> fetchInfo }
}
}
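  // Groups (TopicAndPartition, value) pairs into per-topic batches while preserving their relative order.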
def batchByTopic[T](s: Seq[(TopicAndPartition, T)]): Seq[(String, Seq[(Int, T)])] = {
val result = new ArrayBuffer[(String, ArrayBuffer[(Int, T)])]
s.foreach { case (TopicAndPartition(t, p), value) =>
if (result.isEmpty || result.last._1 != t)
result += (t -> new ArrayBuffer)
result.last._2 += (p -> value)
}
result
}
}
case class FetchRequest(versionId: Short = FetchRequest.CurrentVersion,
correlationId: Int = FetchRequest.DefaultCorrelationId,
clientId: String = ConsumerConfig.DefaultClientId,
replicaId: Int = Request.OrdinaryConsumerId,
maxWait: Int = FetchRequest.DefaultMaxWait,
minBytes: Int = FetchRequest.DefaultMinBytes,
maxBytes: Int = FetchRequest.DefaultMaxBytes,
requestInfo: Seq[(TopicAndPartition, PartitionFetchInfo)])
extends RequestOrResponse(Some(ApiKeys.FETCH.id)) {
/**
* Partitions the request info into a list of lists (one for each topic) while preserving request info ordering
*/
private type PartitionInfos = Seq[(Int, PartitionFetchInfo)]
private lazy val requestInfoGroupedByTopic: Seq[(String, PartitionInfos)] = FetchRequest.batchByTopic(requestInfo)
/** Public constructor for the clients */
@deprecated("The order of partitions in `requestInfo` is relevant, so this constructor is deprecated in favour of the " +
"one that takes a Seq", since = "0.10.1.0")
def this(correlationId: Int,
clientId: String,
maxWait: Int,
minBytes: Int,
maxBytes: Int,
requestInfo: Map[TopicAndPartition, PartitionFetchInfo]) {
this(versionId = FetchRequest.CurrentVersion,
correlationId = correlationId,
clientId = clientId,
replicaId = Request.OrdinaryConsumerId,
maxWait = maxWait,
minBytes = minBytes,
maxBytes = maxBytes,
requestInfo = FetchRequest.shuffle(requestInfo.toSeq))
}
/** Public constructor for the clients */
def this(correlationId: Int,
clientId: String,
maxWait: Int,
minBytes: Int,
maxBytes: Int,
requestInfo: Seq[(TopicAndPartition, PartitionFetchInfo)]) {
this(versionId = FetchRequest.CurrentVersion,
correlationId = correlationId,
clientId = clientId,
replicaId = Request.OrdinaryConsumerId,
maxWait = maxWait,
minBytes = minBytes,
maxBytes = maxBytes,
requestInfo = requestInfo)
}
def writeTo(buffer: ByteBuffer) {
buffer.putShort(versionId)
buffer.putInt(correlationId)
writeShortString(buffer, clientId)
buffer.putInt(replicaId)
buffer.putInt(maxWait)
buffer.putInt(minBytes)
if (versionId >= 3)
buffer.putInt(maxBytes)
buffer.putInt(requestInfoGroupedByTopic.size) // topic count
requestInfoGroupedByTopic.foreach {
case (topic, partitionFetchInfos) =>
writeShortString(buffer, topic)
buffer.putInt(partitionFetchInfos.size) // partition count
partitionFetchInfos.foreach {
case (partition, PartitionFetchInfo(offset, fetchSize)) =>
buffer.putInt(partition)
buffer.putLong(offset)
buffer.putInt(fetchSize)
}
}
}
def sizeInBytes: Int = {
2 + /* versionId */
4 + /* correlationId */
shortStringLength(clientId) +
4 + /* replicaId */
4 + /* maxWait */
4 + /* minBytes */
(if (versionId >= 3) 4 /* maxBytes */ else 0) +
4 + /* topic count */
requestInfoGroupedByTopic.foldLeft(0)((foldedTopics, currTopic) => {
val (topic, partitionFetchInfos) = currTopic
foldedTopics +
shortStringLength(topic) +
4 + /* partition count */
partitionFetchInfos.size * (
4 + /* partition id */
8 + /* offset */
4 /* fetch size */
)
})
}
def isFromFollower = Request.isValidBrokerId(replicaId)
def isFromOrdinaryConsumer = replicaId == Request.OrdinaryConsumerId
def isFromLowLevelConsumer = replicaId == Request.DebuggingConsumerId
def numPartitions = requestInfo.size
override def toString: String = {
describe(true)
}
override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
val fetchResponsePartitionData = requestInfo.map { case (topicAndPartition, _) =>
(topicAndPartition, FetchResponsePartitionData(Errors.forException(e).code, -1, MessageSet.Empty))
}
val errorResponse = FetchResponse(correlationId, fetchResponsePartitionData, request.header.apiVersion)
// Magic value does not matter here because the message set is empty
requestChannel.sendResponse(new RequestChannel.Response(request, new FetchResponseSend(request.connectionId, errorResponse)))
}
override def describe(details: Boolean): String = {
val fetchRequest = new StringBuilder
fetchRequest.append("Name: " + this.getClass.getSimpleName)
fetchRequest.append("; Version: " + versionId)
fetchRequest.append("; CorrelationId: " + correlationId)
fetchRequest.append("; ClientId: " + clientId)
fetchRequest.append("; ReplicaId: " + replicaId)
fetchRequest.append("; MaxWait: " + maxWait + " ms")
fetchRequest.append("; MinBytes: " + minBytes + " bytes")
fetchRequest.append("; MaxBytes:" + maxBytes + " bytes")
if(details)
fetchRequest.append("; RequestInfo: " + requestInfo.mkString(","))
fetchRequest.toString()
}
}
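// Mutable helper for assembling a FetchRequest one partition at a time; as the annotation notes, it is not thread-safe.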
@nonthreadsafe
class FetchRequestBuilder() {
private val correlationId = new AtomicInteger(0)
private var versionId = FetchRequest.CurrentVersion
private var clientId = ConsumerConfig.DefaultClientId
private var replicaId = Request.OrdinaryConsumerId
private var maxWait = FetchRequest.DefaultMaxWait
private var minBytes = FetchRequest.DefaultMinBytes
private var maxBytes = FetchRequest.DefaultMaxBytes
private val requestMap = new collection.mutable.ArrayBuffer[(TopicAndPartition, PartitionFetchInfo)]
def addFetch(topic: String, partition: Int, offset: Long, fetchSize: Int) = {
requestMap.append((TopicAndPartition(topic, partition), PartitionFetchInfo(offset, fetchSize)))
this
}
def clientId(clientId: String): FetchRequestBuilder = {
this.clientId = clientId
this
}
/**
* Only for internal use. Clients shouldn't set replicaId.
*/
private[kafka] def replicaId(replicaId: Int): FetchRequestBuilder = {
this.replicaId = replicaId
this
}
def maxWait(maxWait: Int): FetchRequestBuilder = {
this.maxWait = maxWait
this
}
def minBytes(minBytes: Int): FetchRequestBuilder = {
this.minBytes = minBytes
this
}
def maxBytes(maxBytes: Int): FetchRequestBuilder = {
this.maxBytes = maxBytes
this
}
def requestVersion(versionId: Short): FetchRequestBuilder = {
this.versionId = versionId
this
}
def build() = {
val fetchRequest = FetchRequest(versionId, correlationId.getAndIncrement, clientId, replicaId, maxWait, minBytes,
maxBytes, new ArrayBuffer() ++ requestMap)
requestMap.clear()
fetchRequest
}
}
| eribeiro/kafka | core/src/main/scala/kafka/api/FetchRequest.scala | Scala | apache-2.0 | 10,037 |
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.analysis
import org.scalatest.{FunSpec, Matchers}
class SeverityThresholdsTest extends FunSpec with Matchers {
describe("SeverityThresholds") {
it("can be used to represent thresholds considered in ascending order") {
val thresholds = SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = true)
thresholds.severityOf(0.1D) should be(Severity.NONE)
thresholds.severityOf(0.2D) should be(Severity.LOW)
thresholds.severityOf(0.3D) should be(Severity.LOW)
thresholds.severityOf(0.4D) should be(Severity.MODERATE)
thresholds.severityOf(0.5D) should be(Severity.MODERATE)
thresholds.severityOf(0.6D) should be(Severity.SEVERE)
thresholds.severityOf(0.7D) should be(Severity.SEVERE)
thresholds.severityOf(0.8D) should be(Severity.CRITICAL)
thresholds.severityOf(0.9D) should be(Severity.CRITICAL)
}
it("can be used to represent thresholds considered in descending order") {
val thresholds = SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = false)
thresholds.severityOf(0.1D) should be(Severity.CRITICAL)
thresholds.severityOf(0.2D) should be(Severity.CRITICAL)
thresholds.severityOf(0.3D) should be(Severity.SEVERE)
thresholds.severityOf(0.4D) should be(Severity.SEVERE)
thresholds.severityOf(0.5D) should be(Severity.MODERATE)
thresholds.severityOf(0.6D) should be(Severity.MODERATE)
thresholds.severityOf(0.7D) should be(Severity.LOW)
thresholds.severityOf(0.8D) should be(Severity.LOW)
thresholds.severityOf(0.9D) should be(Severity.NONE)
}
it("can be parsed as ascending thresholds from a string that can be processed by Utils.getParam") {
SeverityThresholds.parse("0.2,0.4,0.6,0.8", ascending = true) should be(
Some(SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = true))
)
}
it("can be parsed as descending thresholds from a string that can be processed by Utils.getParam") {
SeverityThresholds.parse("0.8,0.6,0.4,0.2", ascending = false) should be(
Some(SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = false))
)
}
it("cannot be created as ascending thresholds with unordered values") {
an[IllegalArgumentException] should be thrownBy(
SeverityThresholds(low = 0.8D, moderate = 0.6D, severe = 0.4D, critical = 0.2D, ascending = true)
)
}
it("cannot be created as descending thresholds with unordered values") {
an[IllegalArgumentException] should be thrownBy(
SeverityThresholds(low = 0.2D, moderate = 0.4D, severe = 0.6D, critical = 0.8D, ascending = false)
)
}
}
}
| nntnag17/dr-elephant-1 | test/com/linkedin/drelephant/analysis/SeverityThresholdsTest.scala | Scala | apache-2.0 | 3,421 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import org.tensorflow.framework.NodeDef
import com.intel.analytics.bigdl.nn.ops.{LessEqual => LessEqualOps}
import com.intel.analytics.bigdl.utils.tf.Context
import scala.reflect.ClassTag
class LessEqual extends TensorflowOpsLoader {
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
LessEqualOps[T]()
}
}
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/LessEqual.scala | Scala | apache-2.0 | 1,204 |
/*
* Copyright (C) 2017 Michael Dippery <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mipadi.jupiter.io.files
import java.io.File
import java.nio.file.{FileSystems, Path}
import org.scalatest._
class LocatableSpec extends FlatSpec with Matchers {
val file = new File("src")
val path = FileSystems.getDefault.getPath("src")
val fEv = Locatable.LocatableFile
val pEv = Locatable.LocatablePath
val fLs = new File("src/main/scala/com/mipadi/jupiter/net")
val pLs = FileSystems.getDefault.getPath("src", "main", "scala", "com", "mipadi", "jupiter", "net")
val fs = List("Addressable", "NetworkConvertible", "RichURI", "package").map(s => new File(s"src/main/scala/com/mipadi/jupiter/net/$s.scala"))
val ps = List("Addressable", "NetworkConvertible", "RichURI", "package").map(s => FileSystems.getDefault.getPath("src", "main", "scala", "com", "mipadi", "jupiter", "net", s"$s.scala"))
"A file" should "be convertible to a file" in {
fEv.toFile(file) should be (file)
}
it should "be convertible to a path" in {
fEv.toPath(file) should be (path)
}
it should "join two paths to make a new path" in {
val expected = "src/main/scala"
val arg = new File("src/main")
fEv.join(arg, "scala").toString should be (expected)
}
it should "return a file listing" in {
fEv.getFiles(fLs) should equal (fs)
}
it should "return an empty file listing if it is not a directory" in {
val f = new File("build.sbt")
fEv.getFiles(f) shouldBe empty
}
it should "return its path" in {
fEv.getPath(file) should be ("src")
}
it should "return its absolute path" in {
fEv.getAbsolutePath(file).head should be ('/')
}
it should "return true if it is a directory" in {
fEv.isDirectory(file) should be (true)
}
it should "return false if it is not a directory" in {
val f = new File("build.sbt")
fEv.isDirectory(f) should be (false)
}
"A path" should "be convertible to a file" in {
pEv.toFile(path) should be (file)
}
it should "be convertible to a path" in {
pEv.toPath(path) should be (path)
}
it should "join two paths to make a new path" in {
val expected = "src/main/scala"
val arg = FileSystems.getDefault.getPath("src", "main")
pEv.join(arg, "scala").toString should be (expected)
}
it should "return a file listing" in {
pEv.getFiles(pLs) should equal (ps)
}
it should "return an empty file listing if it is not a directory" in {
val p = FileSystems.getDefault.getPath("build.sbt")
pEv.getFiles(p) shouldBe empty
}
it should "return its path" in {
pEv.getPath(path) should be ("src")
}
it should "return its absolute path" in {
pEv.getAbsolutePath(path).head should be ('/')
}
it should "return true if it is a directory" in {
pEv.isDirectory(path) should be (true)
}
it should "return false if it is not a directory" in {
val p = FileSystems.getDefault.getPath("build.sbt")
pEv.isDirectory(p) should be (false)
}
}
| mdippery/jupiter | src/test/scala/com/mipadi/jupiter/io/files/LocatableSpec.scala | Scala | apache-2.0 | 3,563 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.{Properties, UUID}
import scala.collection.JavaConverters._
import scala.collection.Map
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.json4s.DefaultFormats
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark._
import org.apache.spark.executor._
import org.apache.spark.rdd.RDDOperationScope
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.storage._
/**
* Serializes SparkListener events to/from JSON. This protocol provides strong backwards-
* and forwards-compatibility guarantees: any version of Spark should be able to read JSON output
* written by any other version, including newer versions.
*
* JsonProtocolSuite contains backwards-compatibility tests which check that the current version of
* JsonProtocol is able to read output written by earlier versions. We do not currently have tests
* for reading newer JSON output with older Spark versions.
*
* To ensure that we provide these guarantees, follow these rules when modifying these methods:
*
* - Never delete any JSON fields.
* - Any new JSON fields should be optional; use `Utils.jsonOption` when reading these fields
* in `*FromJson` methods.
*/
private[spark] object JsonProtocol {
// TODO: Remove this file and put JSON serialization into each individual class.
private implicit val format = DefaultFormats
private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
/** ------------------------------------------------- *
* JSON serialization methods for SparkListenerEvents |
* -------------------------------------------------- */
def sparkEventToJson(event: SparkListenerEvent): JValue = {
event match {
case stageSubmitted: SparkListenerStageSubmitted =>
stageSubmittedToJson(stageSubmitted)
case stageCompleted: SparkListenerStageCompleted =>
stageCompletedToJson(stageCompleted)
case taskStart: SparkListenerTaskStart =>
taskStartToJson(taskStart)
case taskGettingResult: SparkListenerTaskGettingResult =>
taskGettingResultToJson(taskGettingResult)
case taskEnd: SparkListenerTaskEnd =>
taskEndToJson(taskEnd)
case jobStart: SparkListenerJobStart =>
jobStartToJson(jobStart)
case jobEnd: SparkListenerJobEnd =>
jobEndToJson(jobEnd)
case environmentUpdate: SparkListenerEnvironmentUpdate =>
environmentUpdateToJson(environmentUpdate)
case blockManagerAdded: SparkListenerBlockManagerAdded =>
blockManagerAddedToJson(blockManagerAdded)
case blockManagerRemoved: SparkListenerBlockManagerRemoved =>
blockManagerRemovedToJson(blockManagerRemoved)
case unpersistRDD: SparkListenerUnpersistRDD =>
unpersistRDDToJson(unpersistRDD)
case applicationStart: SparkListenerApplicationStart =>
applicationStartToJson(applicationStart)
case applicationEnd: SparkListenerApplicationEnd =>
applicationEndToJson(applicationEnd)
case executorAdded: SparkListenerExecutorAdded =>
executorAddedToJson(executorAdded)
case executorRemoved: SparkListenerExecutorRemoved =>
executorRemovedToJson(executorRemoved)
case logStart: SparkListenerLogStart =>
logStartToJson(logStart)
case metricsUpdate: SparkListenerExecutorMetricsUpdate =>
executorMetricsUpdateToJson(metricsUpdate)
case blockUpdated: SparkListenerBlockUpdated =>
throw new MatchError(blockUpdated) // TODO(ekl) implement this
case _ => parse(mapper.writeValueAsString(event))
}
}
def stageSubmittedToJson(stageSubmitted: SparkListenerStageSubmitted): JValue = {
val stageInfo = stageInfoToJson(stageSubmitted.stageInfo)
val properties = propertiesToJson(stageSubmitted.properties)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageSubmitted) ~
("Stage Info" -> stageInfo) ~
("Properties" -> properties)
}
def stageCompletedToJson(stageCompleted: SparkListenerStageCompleted): JValue = {
val stageInfo = stageInfoToJson(stageCompleted.stageInfo)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageCompleted) ~
("Stage Info" -> stageInfo)
}
def taskStartToJson(taskStart: SparkListenerTaskStart): JValue = {
val taskInfo = taskStart.taskInfo
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskStart) ~
("Stage ID" -> taskStart.stageId) ~
("Stage Attempt ID" -> taskStart.stageAttemptId) ~
("Task Info" -> taskInfoToJson(taskInfo))
}
def taskGettingResultToJson(taskGettingResult: SparkListenerTaskGettingResult): JValue = {
val taskInfo = taskGettingResult.taskInfo
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskGettingResult) ~
("Task Info" -> taskInfoToJson(taskInfo))
}
def taskEndToJson(taskEnd: SparkListenerTaskEnd): JValue = {
val taskEndReason = taskEndReasonToJson(taskEnd.reason)
val taskInfo = taskEnd.taskInfo
val taskMetrics = taskEnd.taskMetrics
val taskMetricsJson = if (taskMetrics != null) taskMetricsToJson(taskMetrics) else JNothing
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskEnd) ~
("Stage ID" -> taskEnd.stageId) ~
("Stage Attempt ID" -> taskEnd.stageAttemptId) ~
("Task Type" -> taskEnd.taskType) ~
("Task End Reason" -> taskEndReason) ~
("Task Info" -> taskInfoToJson(taskInfo)) ~
("Task Metrics" -> taskMetricsJson)
}
def jobStartToJson(jobStart: SparkListenerJobStart): JValue = {
val properties = propertiesToJson(jobStart.properties)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobStart) ~
("Job ID" -> jobStart.jobId) ~
("Submission Time" -> jobStart.time) ~
("Stage Infos" -> jobStart.stageInfos.map(stageInfoToJson)) ~ // Added in Spark 1.2.0
("Stage IDs" -> jobStart.stageIds) ~
("Properties" -> properties)
}
def jobEndToJson(jobEnd: SparkListenerJobEnd): JValue = {
val jobResult = jobResultToJson(jobEnd.jobResult)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobEnd) ~
("Job ID" -> jobEnd.jobId) ~
("Completion Time" -> jobEnd.time) ~
("Job Result" -> jobResult)
}
def environmentUpdateToJson(environmentUpdate: SparkListenerEnvironmentUpdate): JValue = {
val environmentDetails = environmentUpdate.environmentDetails
val jvmInformation = mapToJson(environmentDetails("JVM Information").toMap)
val sparkProperties = mapToJson(environmentDetails("Spark Properties").toMap)
val systemProperties = mapToJson(environmentDetails("System Properties").toMap)
val classpathEntries = mapToJson(environmentDetails("Classpath Entries").toMap)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.environmentUpdate) ~
("JVM Information" -> jvmInformation) ~
("Spark Properties" -> sparkProperties) ~
("System Properties" -> systemProperties) ~
("Classpath Entries" -> classpathEntries)
}
def blockManagerAddedToJson(blockManagerAdded: SparkListenerBlockManagerAdded): JValue = {
val blockManagerId = blockManagerIdToJson(blockManagerAdded.blockManagerId)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerAdded) ~
("Block Manager ID" -> blockManagerId) ~
("Maximum Memory" -> blockManagerAdded.maxMem) ~
("Timestamp" -> blockManagerAdded.time)
}
def blockManagerRemovedToJson(blockManagerRemoved: SparkListenerBlockManagerRemoved): JValue = {
val blockManagerId = blockManagerIdToJson(blockManagerRemoved.blockManagerId)
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerRemoved) ~
("Block Manager ID" -> blockManagerId) ~
("Timestamp" -> blockManagerRemoved.time)
}
def unpersistRDDToJson(unpersistRDD: SparkListenerUnpersistRDD): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.unpersistRDD) ~
("RDD ID" -> unpersistRDD.rddId)
}
def applicationStartToJson(applicationStart: SparkListenerApplicationStart): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationStart) ~
("App Name" -> applicationStart.appName) ~
("App ID" -> applicationStart.appId.map(JString(_)).getOrElse(JNothing)) ~
("Timestamp" -> applicationStart.time) ~
("User" -> applicationStart.sparkUser) ~
("App Attempt ID" -> applicationStart.appAttemptId.map(JString(_)).getOrElse(JNothing)) ~
("Driver Logs" -> applicationStart.driverLogs.map(mapToJson).getOrElse(JNothing))
}
def applicationEndToJson(applicationEnd: SparkListenerApplicationEnd): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationEnd) ~
("Timestamp" -> applicationEnd.time)
}
def executorAddedToJson(executorAdded: SparkListenerExecutorAdded): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorAdded) ~
("Timestamp" -> executorAdded.time) ~
("Executor ID" -> executorAdded.executorId) ~
("Executor Info" -> executorInfoToJson(executorAdded.executorInfo))
}
def executorRemovedToJson(executorRemoved: SparkListenerExecutorRemoved): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorRemoved) ~
("Timestamp" -> executorRemoved.time) ~
("Executor ID" -> executorRemoved.executorId) ~
("Removed Reason" -> executorRemoved.reason)
}
def logStartToJson(logStart: SparkListenerLogStart): JValue = {
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.logStart) ~
("Spark Version" -> SPARK_VERSION)
}
def executorMetricsUpdateToJson(metricsUpdate: SparkListenerExecutorMetricsUpdate): JValue = {
val execId = metricsUpdate.execId
val accumUpdates = metricsUpdate.accumUpdates
("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.metricsUpdate) ~
("Executor ID" -> execId) ~
("Metrics Updated" -> accumUpdates.map { case (taskId, stageId, stageAttemptId, updates) =>
("Task ID" -> taskId) ~
("Stage ID" -> stageId) ~
("Stage Attempt ID" -> stageAttemptId) ~
("Accumulator Updates" -> JArray(updates.map(accumulableInfoToJson).toList))
})
}
/** ------------------------------------------------------------------- *
* JSON serialization methods for classes SparkListenerEvents depend on |
* -------------------------------------------------------------------- */
def stageInfoToJson(stageInfo: StageInfo): JValue = {
val rddInfo = JArray(stageInfo.rddInfos.map(rddInfoToJson).toList)
val parentIds = JArray(stageInfo.parentIds.map(JInt(_)).toList)
val submissionTime = stageInfo.submissionTime.map(JInt(_)).getOrElse(JNothing)
val completionTime = stageInfo.completionTime.map(JInt(_)).getOrElse(JNothing)
val failureReason = stageInfo.failureReason.map(JString(_)).getOrElse(JNothing)
("Stage ID" -> stageInfo.stageId) ~
("Stage Attempt ID" -> stageInfo.attemptId) ~
("Stage Name" -> stageInfo.name) ~
("Number of Tasks" -> stageInfo.numTasks) ~
("RDD Info" -> rddInfo) ~
("Parent IDs" -> parentIds) ~
("Details" -> stageInfo.details) ~
("Submission Time" -> submissionTime) ~
("Completion Time" -> completionTime) ~
("Failure Reason" -> failureReason) ~
("Accumulables" -> JArray(
stageInfo.accumulables.values.map(accumulableInfoToJson).toList))
}
def taskInfoToJson(taskInfo: TaskInfo): JValue = {
("Task ID" -> taskInfo.taskId) ~
("Index" -> taskInfo.index) ~
("Attempt" -> taskInfo.attemptNumber) ~
("Launch Time" -> taskInfo.launchTime) ~
("Executor ID" -> taskInfo.executorId) ~
("Host" -> taskInfo.host) ~
("Locality" -> taskInfo.taskLocality.toString) ~
("Speculative" -> taskInfo.speculative) ~
("Getting Result Time" -> taskInfo.gettingResultTime) ~
("Finish Time" -> taskInfo.finishTime) ~
("Failed" -> taskInfo.failed) ~
("Killed" -> taskInfo.killed) ~
("Accumulables" -> JArray(taskInfo.accumulables.toList.map(accumulableInfoToJson)))
}
def accumulableInfoToJson(accumulableInfo: AccumulableInfo): JValue = {
val name = accumulableInfo.name
("ID" -> accumulableInfo.id) ~
("Name" -> name) ~
("Update" -> accumulableInfo.update.map { v => accumValueToJson(name, v) }) ~
("Value" -> accumulableInfo.value.map { v => accumValueToJson(name, v) }) ~
("Internal" -> accumulableInfo.internal) ~
("Count Failed Values" -> accumulableInfo.countFailedValues) ~
("Metadata" -> accumulableInfo.metadata)
}
/**
* Serialize the value of an accumulator to JSON.
*
* For accumulators representing internal task metrics, this looks up the relevant
* [[AccumulatorParam]] to serialize the value accordingly. For all other accumulators,
* this will simply serialize the value as a string.
*
* The behavior here must match that of [[accumValueFromJson]]. Exposed for testing.
*/
private[util] def accumValueToJson(name: Option[String], value: Any): JValue = {
if (name.exists(_.startsWith(InternalAccumulator.METRICS_PREFIX))) {
value match {
case v: Int => JInt(v)
case v: Long => JInt(v)
        // We only have 3 kinds of internal accumulator types, so if it's not int or long, it must be
// the blocks accumulator, whose type is `java.util.List[(BlockId, BlockStatus)]`
case v =>
JArray(v.asInstanceOf[java.util.List[(BlockId, BlockStatus)]].asScala.toList.map {
case (id, status) =>
("Block ID" -> id.toString) ~
("Status" -> blockStatusToJson(status))
})
}
} else {
// For all external accumulators, just use strings
JString(value.toString)
}
}
def taskMetricsToJson(taskMetrics: TaskMetrics): JValue = {
val shuffleReadMetrics: JValue =
("Remote Blocks Fetched" -> taskMetrics.shuffleReadMetrics.remoteBlocksFetched) ~
("Local Blocks Fetched" -> taskMetrics.shuffleReadMetrics.localBlocksFetched) ~
("Fetch Wait Time" -> taskMetrics.shuffleReadMetrics.fetchWaitTime) ~
("Remote Bytes Read" -> taskMetrics.shuffleReadMetrics.remoteBytesRead) ~
("Local Bytes Read" -> taskMetrics.shuffleReadMetrics.localBytesRead) ~
("Total Records Read" -> taskMetrics.shuffleReadMetrics.recordsRead)
val shuffleWriteMetrics: JValue =
("Shuffle Bytes Written" -> taskMetrics.shuffleWriteMetrics.bytesWritten) ~
("Shuffle Write Time" -> taskMetrics.shuffleWriteMetrics.writeTime) ~
("Shuffle Records Written" -> taskMetrics.shuffleWriteMetrics.recordsWritten)
val inputMetrics: JValue =
("Bytes Read" -> taskMetrics.inputMetrics.bytesRead) ~
("Records Read" -> taskMetrics.inputMetrics.recordsRead)
val outputMetrics: JValue =
("Bytes Written" -> taskMetrics.outputMetrics.bytesWritten) ~
("Records Written" -> taskMetrics.outputMetrics.recordsWritten)
val updatedBlocks =
JArray(taskMetrics.updatedBlockStatuses.toList.map { case (id, status) =>
("Block ID" -> id.toString) ~
("Status" -> blockStatusToJson(status))
})
("Executor Deserialize Time" -> taskMetrics.executorDeserializeTime) ~
("Executor Deserialize CPU Time" -> taskMetrics.executorDeserializeCpuTime) ~
("Executor Run Time" -> taskMetrics.executorRunTime) ~
("Executor CPU Time" -> taskMetrics.executorCpuTime) ~
("Result Size" -> taskMetrics.resultSize) ~
("JVM GC Time" -> taskMetrics.jvmGCTime) ~
("Result Serialization Time" -> taskMetrics.resultSerializationTime) ~
("Memory Bytes Spilled" -> taskMetrics.memoryBytesSpilled) ~
("Disk Bytes Spilled" -> taskMetrics.diskBytesSpilled) ~
("Shuffle Read Metrics" -> shuffleReadMetrics) ~
("Shuffle Write Metrics" -> shuffleWriteMetrics) ~
("Input Metrics" -> inputMetrics) ~
("Output Metrics" -> outputMetrics) ~
("Updated Blocks" -> updatedBlocks)
}
def taskEndReasonToJson(taskEndReason: TaskEndReason): JValue = {
val reason = Utils.getFormattedClassName(taskEndReason)
val json: JObject = taskEndReason match {
case fetchFailed: FetchFailed =>
val blockManagerAddress = Option(fetchFailed.bmAddress).
map(blockManagerIdToJson).getOrElse(JNothing)
("Block Manager Address" -> blockManagerAddress) ~
("Shuffle ID" -> fetchFailed.shuffleId) ~
("Map ID" -> fetchFailed.mapId) ~
("Reduce ID" -> fetchFailed.reduceId) ~
("Message" -> fetchFailed.message)
case exceptionFailure: ExceptionFailure =>
val stackTrace = stackTraceToJson(exceptionFailure.stackTrace)
val accumUpdates = JArray(exceptionFailure.accumUpdates.map(accumulableInfoToJson).toList)
("Class Name" -> exceptionFailure.className) ~
("Description" -> exceptionFailure.description) ~
("Stack Trace" -> stackTrace) ~
("Full Stack Trace" -> exceptionFailure.fullStackTrace) ~
("Accumulator Updates" -> accumUpdates)
case taskCommitDenied: TaskCommitDenied =>
("Job ID" -> taskCommitDenied.jobID) ~
("Partition ID" -> taskCommitDenied.partitionID) ~
("Attempt Number" -> taskCommitDenied.attemptNumber)
case ExecutorLostFailure(executorId, exitCausedByApp, reason) =>
("Executor ID" -> executorId) ~
("Exit Caused By App" -> exitCausedByApp) ~
("Loss Reason" -> reason.map(_.toString))
case _ => Utils.emptyJson
}
("Reason" -> reason) ~ json
}
def blockManagerIdToJson(blockManagerId: BlockManagerId): JValue = {
("Executor ID" -> blockManagerId.executorId) ~
("Host" -> blockManagerId.host) ~
("Port" -> blockManagerId.port)
}
def jobResultToJson(jobResult: JobResult): JValue = {
val result = Utils.getFormattedClassName(jobResult)
val json = jobResult match {
case JobSucceeded => Utils.emptyJson
case jobFailed: JobFailed =>
JObject("Exception" -> exceptionToJson(jobFailed.exception))
}
("Result" -> result) ~ json
}
def rddInfoToJson(rddInfo: RDDInfo): JValue = {
val storageLevel = storageLevelToJson(rddInfo.storageLevel)
val parentIds = JArray(rddInfo.parentIds.map(JInt(_)).toList)
("RDD ID" -> rddInfo.id) ~
("Name" -> rddInfo.name) ~
("Scope" -> rddInfo.scope.map(_.toJson)) ~
("Callsite" -> rddInfo.callSite) ~
("Parent IDs" -> parentIds) ~
("Storage Level" -> storageLevel) ~
("Number of Partitions" -> rddInfo.numPartitions) ~
("Number of Cached Partitions" -> rddInfo.numCachedPartitions) ~
("Memory Size" -> rddInfo.memSize) ~
("Disk Size" -> rddInfo.diskSize)
}
def storageLevelToJson(storageLevel: StorageLevel): JValue = {
("Use Disk" -> storageLevel.useDisk) ~
("Use Memory" -> storageLevel.useMemory) ~
("Deserialized" -> storageLevel.deserialized) ~
("Replication" -> storageLevel.replication)
}
def blockStatusToJson(blockStatus: BlockStatus): JValue = {
val storageLevel = storageLevelToJson(blockStatus.storageLevel)
("Storage Level" -> storageLevel) ~
("Memory Size" -> blockStatus.memSize) ~
("Disk Size" -> blockStatus.diskSize)
}
def executorInfoToJson(executorInfo: ExecutorInfo): JValue = {
("Host" -> executorInfo.executorHost) ~
("Total Cores" -> executorInfo.totalCores) ~
("Log Urls" -> mapToJson(executorInfo.logUrlMap))
}
/** ------------------------------ *
* Util JSON serialization methods |
* ------------------------------- */
def mapToJson(m: Map[String, String]): JValue = {
val jsonFields = m.map { case (k, v) => JField(k, JString(v)) }
JObject(jsonFields.toList)
}
def propertiesToJson(properties: Properties): JValue = {
Option(properties).map { p =>
mapToJson(p.asScala)
}.getOrElse(JNothing)
}
def UUIDToJson(id: UUID): JValue = {
("Least Significant Bits" -> id.getLeastSignificantBits) ~
("Most Significant Bits" -> id.getMostSignificantBits)
}
def stackTraceToJson(stackTrace: Array[StackTraceElement]): JValue = {
JArray(stackTrace.map { case line =>
("Declaring Class" -> line.getClassName) ~
("Method Name" -> line.getMethodName) ~
("File Name" -> line.getFileName) ~
("Line Number" -> line.getLineNumber)
}.toList)
}
def exceptionToJson(exception: Exception): JValue = {
("Message" -> exception.getMessage) ~
("Stack Trace" -> stackTraceToJson(exception.getStackTrace))
}
/** --------------------------------------------------- *
* JSON deserialization methods for SparkListenerEvents |
* ---------------------------------------------------- */
private object SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES {
val stageSubmitted = Utils.getFormattedClassName(SparkListenerStageSubmitted)
val stageCompleted = Utils.getFormattedClassName(SparkListenerStageCompleted)
val taskStart = Utils.getFormattedClassName(SparkListenerTaskStart)
val taskGettingResult = Utils.getFormattedClassName(SparkListenerTaskGettingResult)
val taskEnd = Utils.getFormattedClassName(SparkListenerTaskEnd)
val jobStart = Utils.getFormattedClassName(SparkListenerJobStart)
val jobEnd = Utils.getFormattedClassName(SparkListenerJobEnd)
val environmentUpdate = Utils.getFormattedClassName(SparkListenerEnvironmentUpdate)
val blockManagerAdded = Utils.getFormattedClassName(SparkListenerBlockManagerAdded)
val blockManagerRemoved = Utils.getFormattedClassName(SparkListenerBlockManagerRemoved)
val unpersistRDD = Utils.getFormattedClassName(SparkListenerUnpersistRDD)
val applicationStart = Utils.getFormattedClassName(SparkListenerApplicationStart)
val applicationEnd = Utils.getFormattedClassName(SparkListenerApplicationEnd)
val executorAdded = Utils.getFormattedClassName(SparkListenerExecutorAdded)
val executorRemoved = Utils.getFormattedClassName(SparkListenerExecutorRemoved)
val logStart = Utils.getFormattedClassName(SparkListenerLogStart)
val metricsUpdate = Utils.getFormattedClassName(SparkListenerExecutorMetricsUpdate)
}
def sparkEventFromJson(json: JValue): SparkListenerEvent = {
import SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES._
(json \\ "Event").extract[String] match {
case `stageSubmitted` => stageSubmittedFromJson(json)
case `stageCompleted` => stageCompletedFromJson(json)
case `taskStart` => taskStartFromJson(json)
case `taskGettingResult` => taskGettingResultFromJson(json)
case `taskEnd` => taskEndFromJson(json)
case `jobStart` => jobStartFromJson(json)
case `jobEnd` => jobEndFromJson(json)
case `environmentUpdate` => environmentUpdateFromJson(json)
case `blockManagerAdded` => blockManagerAddedFromJson(json)
case `blockManagerRemoved` => blockManagerRemovedFromJson(json)
case `unpersistRDD` => unpersistRDDFromJson(json)
case `applicationStart` => applicationStartFromJson(json)
case `applicationEnd` => applicationEndFromJson(json)
case `executorAdded` => executorAddedFromJson(json)
case `executorRemoved` => executorRemovedFromJson(json)
case `logStart` => logStartFromJson(json)
case `metricsUpdate` => executorMetricsUpdateFromJson(json)
case other => mapper.readValue(compact(render(json)), Utils.classForName(other))
.asInstanceOf[SparkListenerEvent]
}
}
def stageSubmittedFromJson(json: JValue): SparkListenerStageSubmitted = {
val stageInfo = stageInfoFromJson(json \\ "Stage Info")
val properties = propertiesFromJson(json \\ "Properties")
SparkListenerStageSubmitted(stageInfo, properties)
}
def stageCompletedFromJson(json: JValue): SparkListenerStageCompleted = {
val stageInfo = stageInfoFromJson(json \\ "Stage Info")
SparkListenerStageCompleted(stageInfo)
}
def taskStartFromJson(json: JValue): SparkListenerTaskStart = {
val stageId = (json \\ "Stage ID").extract[Int]
val stageAttemptId =
Utils.jsonOption(json \\ "Stage Attempt ID").map(_.extract[Int]).getOrElse(0)
val taskInfo = taskInfoFromJson(json \\ "Task Info")
SparkListenerTaskStart(stageId, stageAttemptId, taskInfo)
}
def taskGettingResultFromJson(json: JValue): SparkListenerTaskGettingResult = {
val taskInfo = taskInfoFromJson(json \\ "Task Info")
SparkListenerTaskGettingResult(taskInfo)
}
def taskEndFromJson(json: JValue): SparkListenerTaskEnd = {
val stageId = (json \\ "Stage ID").extract[Int]
val stageAttemptId =
Utils.jsonOption(json \\ "Stage Attempt ID").map(_.extract[Int]).getOrElse(0)
val taskType = (json \\ "Task Type").extract[String]
val taskEndReason = taskEndReasonFromJson(json \\ "Task End Reason")
val taskInfo = taskInfoFromJson(json \\ "Task Info")
val taskMetrics = taskMetricsFromJson(json \\ "Task Metrics")
SparkListenerTaskEnd(stageId, stageAttemptId, taskType, taskEndReason, taskInfo, taskMetrics)
}
def jobStartFromJson(json: JValue): SparkListenerJobStart = {
val jobId = (json \\ "Job ID").extract[Int]
val submissionTime =
Utils.jsonOption(json \\ "Submission Time").map(_.extract[Long]).getOrElse(-1L)
val stageIds = (json \\ "Stage IDs").extract[List[JValue]].map(_.extract[Int])
val properties = propertiesFromJson(json \\ "Properties")
// The "Stage Infos" field was added in Spark 1.2.0
val stageInfos = Utils.jsonOption(json \\ "Stage Infos")
.map(_.extract[Seq[JValue]].map(stageInfoFromJson)).getOrElse {
stageIds.map { id =>
new StageInfo(id, 0, "unknown", 0, Seq.empty, Seq.empty, "unknown")
}
}
SparkListenerJobStart(jobId, submissionTime, stageInfos, properties)
}
def jobEndFromJson(json: JValue): SparkListenerJobEnd = {
val jobId = (json \\ "Job ID").extract[Int]
val completionTime =
Utils.jsonOption(json \\ "Completion Time").map(_.extract[Long]).getOrElse(-1L)
val jobResult = jobResultFromJson(json \\ "Job Result")
SparkListenerJobEnd(jobId, completionTime, jobResult)
}
def environmentUpdateFromJson(json: JValue): SparkListenerEnvironmentUpdate = {
val environmentDetails = Map[String, Seq[(String, String)]](
"JVM Information" -> mapFromJson(json \\ "JVM Information").toSeq,
"Spark Properties" -> mapFromJson(json \\ "Spark Properties").toSeq,
"System Properties" -> mapFromJson(json \\ "System Properties").toSeq,
"Classpath Entries" -> mapFromJson(json \\ "Classpath Entries").toSeq)
SparkListenerEnvironmentUpdate(environmentDetails)
}
def blockManagerAddedFromJson(json: JValue): SparkListenerBlockManagerAdded = {
val blockManagerId = blockManagerIdFromJson(json \\ "Block Manager ID")
val maxMem = (json \\ "Maximum Memory").extract[Long]
val time = Utils.jsonOption(json \\ "Timestamp").map(_.extract[Long]).getOrElse(-1L)
SparkListenerBlockManagerAdded(time, blockManagerId, maxMem)
}
def blockManagerRemovedFromJson(json: JValue): SparkListenerBlockManagerRemoved = {
val blockManagerId = blockManagerIdFromJson(json \\ "Block Manager ID")
val time = Utils.jsonOption(json \\ "Timestamp").map(_.extract[Long]).getOrElse(-1L)
SparkListenerBlockManagerRemoved(time, blockManagerId)
}
def unpersistRDDFromJson(json: JValue): SparkListenerUnpersistRDD = {
SparkListenerUnpersistRDD((json \\ "RDD ID").extract[Int])
}
def applicationStartFromJson(json: JValue): SparkListenerApplicationStart = {
val appName = (json \\ "App Name").extract[String]
val appId = Utils.jsonOption(json \\ "App ID").map(_.extract[String])
val time = (json \\ "Timestamp").extract[Long]
val sparkUser = (json \\ "User").extract[String]
val appAttemptId = Utils.jsonOption(json \\ "App Attempt ID").map(_.extract[String])
val driverLogs = Utils.jsonOption(json \\ "Driver Logs").map(mapFromJson)
SparkListenerApplicationStart(appName, appId, time, sparkUser, appAttemptId, driverLogs)
}
def applicationEndFromJson(json: JValue): SparkListenerApplicationEnd = {
SparkListenerApplicationEnd((json \\ "Timestamp").extract[Long])
}
def executorAddedFromJson(json: JValue): SparkListenerExecutorAdded = {
val time = (json \\ "Timestamp").extract[Long]
val executorId = (json \\ "Executor ID").extract[String]
val executorInfo = executorInfoFromJson(json \\ "Executor Info")
SparkListenerExecutorAdded(time, executorId, executorInfo)
}
def executorRemovedFromJson(json: JValue): SparkListenerExecutorRemoved = {
val time = (json \\ "Timestamp").extract[Long]
val executorId = (json \\ "Executor ID").extract[String]
val reason = (json \\ "Removed Reason").extract[String]
SparkListenerExecutorRemoved(time, executorId, reason)
}
def logStartFromJson(json: JValue): SparkListenerLogStart = {
val sparkVersion = (json \\ "Spark Version").extract[String]
SparkListenerLogStart(sparkVersion)
}
def executorMetricsUpdateFromJson(json: JValue): SparkListenerExecutorMetricsUpdate = {
val execInfo = (json \\ "Executor ID").extract[String]
val accumUpdates = (json \\ "Metrics Updated").extract[List[JValue]].map { json =>
val taskId = (json \\ "Task ID").extract[Long]
val stageId = (json \\ "Stage ID").extract[Int]
val stageAttemptId = (json \\ "Stage Attempt ID").extract[Int]
val updates =
(json \\ "Accumulator Updates").extract[List[JValue]].map(accumulableInfoFromJson)
(taskId, stageId, stageAttemptId, updates)
}
SparkListenerExecutorMetricsUpdate(execInfo, accumUpdates)
}
/** --------------------------------------------------------------------- *
* JSON deserialization methods for classes SparkListenerEvents depend on |
* ---------------------------------------------------------------------- */
def stageInfoFromJson(json: JValue): StageInfo = {
val stageId = (json \\ "Stage ID").extract[Int]
val attemptId = Utils.jsonOption(json \\ "Stage Attempt ID").map(_.extract[Int]).getOrElse(0)
val stageName = (json \\ "Stage Name").extract[String]
val numTasks = (json \\ "Number of Tasks").extract[Int]
val rddInfos = (json \\ "RDD Info").extract[List[JValue]].map(rddInfoFromJson)
val parentIds = Utils.jsonOption(json \\ "Parent IDs")
.map { l => l.extract[List[JValue]].map(_.extract[Int]) }
.getOrElse(Seq.empty)
val details = Utils.jsonOption(json \\ "Details").map(_.extract[String]).getOrElse("")
val submissionTime = Utils.jsonOption(json \\ "Submission Time").map(_.extract[Long])
val completionTime = Utils.jsonOption(json \\ "Completion Time").map(_.extract[Long])
val failureReason = Utils.jsonOption(json \\ "Failure Reason").map(_.extract[String])
val accumulatedValues = {
Utils.jsonOption(json \\ "Accumulables").map(_.extract[List[JValue]]) match {
case Some(values) => values.map(accumulableInfoFromJson)
case None => Seq[AccumulableInfo]()
}
}
val stageInfo = new StageInfo(
stageId, attemptId, stageName, numTasks, rddInfos, parentIds, details)
stageInfo.submissionTime = submissionTime
stageInfo.completionTime = completionTime
stageInfo.failureReason = failureReason
for (accInfo <- accumulatedValues) {
stageInfo.accumulables(accInfo.id) = accInfo
}
stageInfo
}
def taskInfoFromJson(json: JValue): TaskInfo = {
val taskId = (json \\ "Task ID").extract[Long]
val index = (json \\ "Index").extract[Int]
val attempt = Utils.jsonOption(json \\ "Attempt").map(_.extract[Int]).getOrElse(1)
val launchTime = (json \\ "Launch Time").extract[Long]
val executorId = (json \\ "Executor ID").extract[String].intern()
val host = (json \\ "Host").extract[String].intern()
val taskLocality = TaskLocality.withName((json \\ "Locality").extract[String])
val speculative = Utils.jsonOption(json \\ "Speculative").exists(_.extract[Boolean])
val gettingResultTime = (json \\ "Getting Result Time").extract[Long]
val finishTime = (json \\ "Finish Time").extract[Long]
val failed = (json \\ "Failed").extract[Boolean]
val killed = Utils.jsonOption(json \\ "Killed").exists(_.extract[Boolean])
val accumulables = Utils.jsonOption(json \\ "Accumulables").map(_.extract[Seq[JValue]]) match {
case Some(values) => values.map(accumulableInfoFromJson)
case None => Seq[AccumulableInfo]()
}
val taskInfo =
new TaskInfo(taskId, index, attempt, launchTime, executorId, host, taskLocality, speculative)
taskInfo.gettingResultTime = gettingResultTime
taskInfo.finishTime = finishTime
taskInfo.failed = failed
taskInfo.killed = killed
taskInfo.setAccumulables(accumulables)
taskInfo
}
def accumulableInfoFromJson(json: JValue): AccumulableInfo = {
val id = (json \\ "ID").extract[Long]
val name = Utils.jsonOption(json \\ "Name").map(_.extract[String])
val update = Utils.jsonOption(json \\ "Update").map { v => accumValueFromJson(name, v) }
val value = Utils.jsonOption(json \\ "Value").map { v => accumValueFromJson(name, v) }
val internal = Utils.jsonOption(json \\ "Internal").exists(_.extract[Boolean])
val countFailedValues =
Utils.jsonOption(json \\ "Count Failed Values").exists(_.extract[Boolean])
val metadata = Utils.jsonOption(json \\ "Metadata").map(_.extract[String])
new AccumulableInfo(id, name, update, value, internal, countFailedValues, metadata)
}
/**
* Deserialize the value of an accumulator from JSON.
*
* For accumulators representing internal task metrics, this looks up the relevant
* [[AccumulatorParam]] to deserialize the value accordingly. For all other
* accumulators, this will simply deserialize the value as a string.
*
* The behavior here must match that of [[accumValueToJson]]. Exposed for testing.
*/
private[util] def accumValueFromJson(name: Option[String], value: JValue): Any = {
if (name.exists(_.startsWith(InternalAccumulator.METRICS_PREFIX))) {
value match {
case JInt(v) => v.toLong
case JArray(v) =>
v.map { blockJson =>
val id = BlockId((blockJson \\ "Block ID").extract[String])
val status = blockStatusFromJson(blockJson \\ "Status")
(id, status)
}.asJava
case _ => throw new IllegalArgumentException(s"unexpected json value $value for " +
"accumulator " + name.get)
}
} else {
value.extract[String]
}
}
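  // Illustrative sketch (not part of the original file): the two JSON shapes accepted for
  // internal task-metric accumulators, plus the fallback. Names and values are hypothetical.
  //
  //   accumValueFromJson(Some("internal.metrics.executorRunTime"), JInt(42))
  //     // => 42L
  //   accumValueFromJson(Some("internal.metrics.updatedBlockStatuses"), JArray(blockJsons))
  //     // => java.util.List[(BlockId, BlockStatus)]
  //   accumValueFromJson(Some("my.custom.acc"), JString("7"))
  //     // => "7" (falls through to value.extract[String])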
def taskMetricsFromJson(json: JValue): TaskMetrics = {
val metrics = TaskMetrics.empty
if (json == JNothing) {
return metrics
}
metrics.setExecutorDeserializeTime((json \\ "Executor Deserialize Time").extract[Long])
metrics.setExecutorDeserializeCpuTime((json \\ "Executor Deserialize CPU Time") match {
case JNothing => 0
case x => x.extract[Long]
})
metrics.setExecutorRunTime((json \\ "Executor Run Time").extract[Long])
metrics.setExecutorCpuTime((json \\ "Executor CPU Time") match {
case JNothing => 0
case x => x.extract[Long]
})
metrics.setResultSize((json \\ "Result Size").extract[Long])
metrics.setJvmGCTime((json \\ "JVM GC Time").extract[Long])
metrics.setResultSerializationTime((json \\ "Result Serialization Time").extract[Long])
metrics.incMemoryBytesSpilled((json \\ "Memory Bytes Spilled").extract[Long])
metrics.incDiskBytesSpilled((json \\ "Disk Bytes Spilled").extract[Long])
// Shuffle read metrics
Utils.jsonOption(json \\ "Shuffle Read Metrics").foreach { readJson =>
val readMetrics = metrics.createTempShuffleReadMetrics()
readMetrics.incRemoteBlocksFetched((readJson \\ "Remote Blocks Fetched").extract[Int])
readMetrics.incLocalBlocksFetched((readJson \\ "Local Blocks Fetched").extract[Int])
readMetrics.incRemoteBytesRead((readJson \\ "Remote Bytes Read").extract[Long])
readMetrics.incLocalBytesRead(
Utils.jsonOption(readJson \\ "Local Bytes Read").map(_.extract[Long]).getOrElse(0L))
readMetrics.incFetchWaitTime((readJson \\ "Fetch Wait Time").extract[Long])
readMetrics.incRecordsRead(
Utils.jsonOption(readJson \\ "Total Records Read").map(_.extract[Long]).getOrElse(0L))
metrics.mergeShuffleReadMetrics()
}
// Shuffle write metrics
// TODO: Drop the redundant "Shuffle" since it's inconsistent with related classes.
Utils.jsonOption(json \\ "Shuffle Write Metrics").foreach { writeJson =>
val writeMetrics = metrics.shuffleWriteMetrics
writeMetrics.incBytesWritten((writeJson \\ "Shuffle Bytes Written").extract[Long])
writeMetrics.incRecordsWritten(
Utils.jsonOption(writeJson \\ "Shuffle Records Written").map(_.extract[Long]).getOrElse(0L))
writeMetrics.incWriteTime((writeJson \\ "Shuffle Write Time").extract[Long])
}
// Output metrics
Utils.jsonOption(json \\ "Output Metrics").foreach { outJson =>
val outputMetrics = metrics.outputMetrics
outputMetrics.setBytesWritten((outJson \\ "Bytes Written").extract[Long])
outputMetrics.setRecordsWritten(
Utils.jsonOption(outJson \\ "Records Written").map(_.extract[Long]).getOrElse(0L))
}
// Input metrics
Utils.jsonOption(json \\ "Input Metrics").foreach { inJson =>
val inputMetrics = metrics.inputMetrics
inputMetrics.incBytesRead((inJson \\ "Bytes Read").extract[Long])
inputMetrics.incRecordsRead(
Utils.jsonOption(inJson \\ "Records Read").map(_.extract[Long]).getOrElse(0L))
}
// Updated blocks
Utils.jsonOption(json \\ "Updated Blocks").foreach { blocksJson =>
metrics.setUpdatedBlockStatuses(blocksJson.extract[List[JValue]].map { blockJson =>
val id = BlockId((blockJson \\ "Block ID").extract[String])
val status = blockStatusFromJson(blockJson \\ "Status")
(id, status)
})
}
metrics
}
private object TASK_END_REASON_FORMATTED_CLASS_NAMES {
val success = Utils.getFormattedClassName(Success)
val resubmitted = Utils.getFormattedClassName(Resubmitted)
val fetchFailed = Utils.getFormattedClassName(FetchFailed)
val exceptionFailure = Utils.getFormattedClassName(ExceptionFailure)
val taskResultLost = Utils.getFormattedClassName(TaskResultLost)
val taskKilled = Utils.getFormattedClassName(TaskKilled)
val taskCommitDenied = Utils.getFormattedClassName(TaskCommitDenied)
val executorLostFailure = Utils.getFormattedClassName(ExecutorLostFailure)
val unknownReason = Utils.getFormattedClassName(UnknownReason)
}
def taskEndReasonFromJson(json: JValue): TaskEndReason = {
import TASK_END_REASON_FORMATTED_CLASS_NAMES._
(json \\ "Reason").extract[String] match {
case `success` => Success
case `resubmitted` => Resubmitted
case `fetchFailed` =>
val blockManagerAddress = blockManagerIdFromJson(json \\ "Block Manager Address")
val shuffleId = (json \\ "Shuffle ID").extract[Int]
val mapId = (json \\ "Map ID").extract[Int]
val reduceId = (json \\ "Reduce ID").extract[Int]
val message = Utils.jsonOption(json \\ "Message").map(_.extract[String])
new FetchFailed(blockManagerAddress, shuffleId, mapId, reduceId,
message.getOrElse("Unknown reason"))
case `exceptionFailure` =>
val className = (json \\ "Class Name").extract[String]
val description = (json \\ "Description").extract[String]
val stackTrace = stackTraceFromJson(json \\ "Stack Trace")
val fullStackTrace =
Utils.jsonOption(json \\ "Full Stack Trace").map(_.extract[String]).orNull
// Fallback on getting accumulator updates from TaskMetrics, which was logged in Spark 1.x
val accumUpdates = Utils.jsonOption(json \\ "Accumulator Updates")
.map(_.extract[List[JValue]].map(accumulableInfoFromJson))
.getOrElse(taskMetricsFromJson(json \\ "Metrics").accumulators().map(acc => {
acc.toInfo(Some(acc.value), None)
}))
ExceptionFailure(className, description, stackTrace, fullStackTrace, None, accumUpdates)
case `taskResultLost` => TaskResultLost
case `taskKilled` => TaskKilled
case `taskCommitDenied` =>
// Unfortunately, the `TaskCommitDenied` message was introduced in 1.3.0 but the JSON
// de/serialization logic was not added until 1.5.1. To provide backward compatibility
// for reading those logs, we need to provide default values for all the fields.
val jobId = Utils.jsonOption(json \\ "Job ID").map(_.extract[Int]).getOrElse(-1)
val partitionId = Utils.jsonOption(json \\ "Partition ID").map(_.extract[Int]).getOrElse(-1)
val attemptNo = Utils.jsonOption(json \\ "Attempt Number").map(_.extract[Int]).getOrElse(-1)
TaskCommitDenied(jobId, partitionId, attemptNo)
case `executorLostFailure` =>
val exitCausedByApp = Utils.jsonOption(json \\ "Exit Caused By App").map(_.extract[Boolean])
val executorId = Utils.jsonOption(json \\ "Executor ID").map(_.extract[String])
val reason = Utils.jsonOption(json \\ "Loss Reason").map(_.extract[String])
ExecutorLostFailure(
executorId.getOrElse("Unknown"),
exitCausedByApp.getOrElse(true),
reason)
case `unknownReason` => UnknownReason
}
}
def blockManagerIdFromJson(json: JValue): BlockManagerId = {
// On metadata fetch fail, block manager ID can be null (SPARK-4471)
if (json == JNothing) {
return null
}
val executorId = (json \\ "Executor ID").extract[String].intern()
val host = (json \\ "Host").extract[String].intern()
val port = (json \\ "Port").extract[Int]
BlockManagerId(executorId, host, port)
}
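  // Behaviour sketch (assumption, for illustration only): a missing field yields null, per the
  // SPARK-4471 note above, while a well-formed object round-trips to a BlockManagerId. The
  // executor ID, host and port below are hypothetical.
  //
  //   blockManagerIdFromJson(JNothing)
  //     // => null
  //   blockManagerIdFromJson(("Executor ID" -> "exec-1") ~ ("Host" -> "host-a") ~ ("Port" -> 7337))
  //     // => BlockManagerId("exec-1", "host-a", 7337)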
private object JOB_RESULT_FORMATTED_CLASS_NAMES {
val jobSucceeded = Utils.getFormattedClassName(JobSucceeded)
val jobFailed = Utils.getFormattedClassName(JobFailed)
}
def jobResultFromJson(json: JValue): JobResult = {
import JOB_RESULT_FORMATTED_CLASS_NAMES._
(json \\ "Result").extract[String] match {
case `jobSucceeded` => JobSucceeded
case `jobFailed` =>
val exception = exceptionFromJson(json \\ "Exception")
new JobFailed(exception)
}
}
def rddInfoFromJson(json: JValue): RDDInfo = {
val rddId = (json \\ "RDD ID").extract[Int]
val name = (json \\ "Name").extract[String]
val scope = Utils.jsonOption(json \\ "Scope")
.map(_.extract[String])
.map(RDDOperationScope.fromJson)
val callsite = Utils.jsonOption(json \\ "Callsite").map(_.extract[String]).getOrElse("")
val parentIds = Utils.jsonOption(json \\ "Parent IDs")
.map { l => l.extract[List[JValue]].map(_.extract[Int]) }
.getOrElse(Seq.empty)
val storageLevel = storageLevelFromJson(json \\ "Storage Level")
val numPartitions = (json \\ "Number of Partitions").extract[Int]
val numCachedPartitions = (json \\ "Number of Cached Partitions").extract[Int]
val memSize = (json \\ "Memory Size").extract[Long]
val diskSize = (json \\ "Disk Size").extract[Long]
val rddInfo = new RDDInfo(rddId, name, numPartitions, storageLevel, parentIds, callsite, scope)
rddInfo.numCachedPartitions = numCachedPartitions
rddInfo.memSize = memSize
rddInfo.diskSize = diskSize
rddInfo
}
def storageLevelFromJson(json: JValue): StorageLevel = {
val useDisk = (json \\ "Use Disk").extract[Boolean]
val useMemory = (json \\ "Use Memory").extract[Boolean]
val deserialized = (json \\ "Deserialized").extract[Boolean]
val replication = (json \\ "Replication").extract[Int]
StorageLevel(useDisk, useMemory, deserialized, replication)
}
def blockStatusFromJson(json: JValue): BlockStatus = {
val storageLevel = storageLevelFromJson(json \\ "Storage Level")
val memorySize = (json \\ "Memory Size").extract[Long]
val diskSize = (json \\ "Disk Size").extract[Long]
BlockStatus(storageLevel, memorySize, diskSize)
}
def executorInfoFromJson(json: JValue): ExecutorInfo = {
val executorHost = (json \\ "Host").extract[String]
val totalCores = (json \\ "Total Cores").extract[Int]
val logUrls = mapFromJson(json \\ "Log Urls").toMap
new ExecutorInfo(executorHost, totalCores, logUrls)
}
/** -------------------------------- *
* Util JSON deserialization methods |
* --------------------------------- */
def mapFromJson(json: JValue): Map[String, String] = {
val jsonFields = json.asInstanceOf[JObject].obj
jsonFields.map { case JField(k, JString(v)) => (k, v) }.toMap
}
def propertiesFromJson(json: JValue): Properties = {
Utils.jsonOption(json).map { value =>
val properties = new Properties
mapFromJson(json).foreach { case (k, v) => properties.setProperty(k, v) }
properties
}.getOrElse(null)
}
def UUIDFromJson(json: JValue): UUID = {
val leastSignificantBits = (json \\ "Least Significant Bits").extract[Long]
val mostSignificantBits = (json \\ "Most Significant Bits").extract[Long]
new UUID(leastSignificantBits, mostSignificantBits)
}
def stackTraceFromJson(json: JValue): Array[StackTraceElement] = {
json.extract[List[JValue]].map { line =>
val declaringClass = (line \\ "Declaring Class").extract[String]
val methodName = (line \\ "Method Name").extract[String]
val fileName = (line \\ "File Name").extract[String]
val lineNumber = (line \\ "Line Number").extract[Int]
new StackTraceElement(declaringClass, methodName, fileName, lineNumber)
}.toArray
}
def exceptionFromJson(json: JValue): Exception = {
val e = new Exception((json \\ "Message").extract[String])
e.setStackTrace(stackTraceFromJson(json \\ "Stack Trace"))
e
}
}
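// Illustrative round-trip sketch (not part of the original file): mapToJson and mapFromJson
// above are inverses for plain string maps. The map contents are hypothetical.
//
//   val m    = Map("spark.app.name" -> "demo", "spark.master" -> "local[2]")
//   val json = mapToJson(m)       // JObject(List(JField("spark.app.name", JString("demo")), ...))
//   mapFromJson(json) == m        // true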
| sh-cho/cshSpark | util/JsonProtocol.scala | Scala | apache-2.0 | 46,514 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.{Row, SQLContext}
class ChiSqSelectorSuite extends SparkFunSuite with MLlibTestSparkContext {
test("Test Chi-Square selector") {
val sqlContext = SQLContext.getOrCreate(sc)
import sqlContext.implicits._
val data = Seq(
LabeledPoint(0.0, Vectors.sparse(3, Array((0, 8.0), (1, 7.0)))),
LabeledPoint(1.0, Vectors.sparse(3, Array((1, 9.0), (2, 6.0)))),
LabeledPoint(1.0, Vectors.dense(Array(0.0, 9.0, 8.0))),
LabeledPoint(2.0, Vectors.dense(Array(8.0, 9.0, 5.0)))
)
val preFilteredData = Seq(
Vectors.dense(0.0),
Vectors.dense(6.0),
Vectors.dense(8.0),
Vectors.dense(5.0)
)
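    // With numTopFeatures = 1, the selector is expected to keep only the feature whose
    // chi-squared statistic against the label is highest (here the third column, index 2),
    // so each row of preFilteredData is that column of `data`: 0.0, 6.0, 8.0, 5.0.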
val df = sc.parallelize(data.zip(preFilteredData))
.map(x => (x._1.label, x._1.features, x._2))
.toDF("label", "data", "preFilteredData")
val model = new ChiSqSelector()
.setNumTopFeatures(1)
.setFeaturesCol("data")
.setLabelCol("label")
.setOutputCol("filtered")
model.fit(df).transform(df).select("filtered", "preFilteredData").collect().foreach {
case Row(vec1: Vector, vec2: Vector) =>
assert(vec1 ~== vec2 absTol 1e-1)
}
}
}
| pronix/spark | mllib/src/test/scala/org/apache/spark/ml/feature/ChiSqSelectorSuite.scala | Scala | apache-2.0 | 2,269 |
package grammarcomp
package benchmarks
import grammar.EBNFGrammar.BNFGrammar
import grammar.GrammarReaders
object JavascriptGrammar2 extends Benchmark {
import GrammarReaders._
def benchmarkName = "JSGrammar-Simple"
def benchmarkSource = "http://hepunx.rl.ac.uk/~adye/jsspec11/llr.htm"
def ebnfGrammar = bnfgrammar"""Program -> "" | Element Program
Element -> function Identifier '(' ParameterListOpt ')' CompoundStatement | Statement
ParameterListOpt -> "" | ParameterList
ParameterList -> Identifier | Identifier ',' ParameterList
CompoundStatement -> '{' Statements '}'
Statements -> "" | Statement Statements
Statement -> ';' | if Condition Statement | if Condition Statement else Statement | while Condition Statement | ForParen ';' ExpressionOpt ';' ExpressionOpt ')' Statement | ForBegin ';' ExpressionOpt ';' ExpressionOpt ')' Statement | ForBegin in Expression ')' Statement | break ';' | continue ';' | with '(' Expression ')' Statement | return ExpressionOpt ';' | CompoundStatement | VariablesOrExpression ;
Condition -> '(' Expression ')'
ForParen -> for '('
ForBegin -> ForParen VariablesOrExpression
VariablesOrExpression -> var Variables | Expression
Variables -> Variable | Variable ',' Variables
Variable -> Identifier | Identifier '=' AssignmentExpression
ExpressionOpt -> "" | Expression
Expression -> AssignmentExpression | AssignmentExpression ',' Expression
AssignmentExpression -> ConditionalExpression | ConditionalExpression AssignmentOperator AssignmentExpression
ConditionalExpression -> OrExpression | OrExpression '?' AssignmentExpression ':' AssignmentExpression
OrExpression -> AndExpression | AndExpression '||' OrExpression
AndExpression -> BitwiseOrExpression | BitwiseOrExpression '&&' AndExpression
BitwiseOrExpression -> BitwiseXorExpression | BitwiseXorExpression '|' BitwiseOrExpression
BitwiseXorExpression -> BitwiseAndExpression | BitwiseAndExpression ^ BitwiseXorExpression
BitwiseAndExpression -> EqualityExpression | EqualityExpression '&' BitwiseAndExpression
EqualityExpression -> RelationalExpression | RelationalExpression EqualityOperator EqualityExpression
RelationalExpression -> ShiftExpression | RelationalExpression RelationalOperator ShiftExpression
ShiftExpression -> AdditiveExpression | AdditiveExpression ShiftOperator ShiftExpression
AdditiveExpression -> MultiplicativeExpression | MultiplicativeExpression '+' AdditiveExpression | MultiplicativeExpression '-' AdditiveExpression
MultiplicativeExpression -> UnaryExpression | UnaryExpression MultiplicativeOperator MultiplicativeExpression
UnaryExpression -> MemberExpression | UnaryOperator UnaryExpression | '-' UnaryExpression | IncrementOperator MemberExpression | MemberExpression IncrementOperator | new Constructor | delete MemberExpression
Constructor -> this '.' ConstructorCall | ConstructorCall
ConstructorCall -> Identifier | Identifier '(' ArgumentListOpt ')' | Identifier . ConstructorCall
MemberExpression -> PrimaryExpression | PrimaryExpression '.' MemberExpression | PrimaryExpression '[' Expression ']' | PrimaryExpression '(' ArgumentListOpt ')'
ArgumentListOpt -> "" | ArgumentList
ArgumentList -> AssignmentExpression | AssignmentExpression ',' ArgumentList
PrimaryExpression -> '(' Expression ')' | Identifier | IntegerLiteral | FloatingPointLiteral | StringLiteral | false | true | null | this"""
} | epfl-lara/GrammarComparison | src/main/scala/grammarcomp/benchmarks/JavascriptGrammar2.scala | Scala | mit | 3,372 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
* Applies element-wise exp to input tensor.
*/
@SerialVersionUID(4918769744611296463L)
class Exp[T: ClassTag] (implicit ev: TensorNumeric[T])
extends TensorModule[T] {
override def updateOutput(input: Tensor[T]): Tensor[T] = {
output.exp(input)
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
gradInput
.resizeAs(gradOutput)
.cmul(output, gradOutput)
}
}
object Exp {
def apply[@specialized(Float, Double) T: ClassTag]()
(implicit ev: TensorNumeric[T]) : Exp[T] = {
new Exp[T]()
}
}
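// Illustrative usage sketch (not part of the original file); the tensor helpers follow the
// surrounding BigDL API and the concrete values are hypothetical:
//
//   val exp    = Exp[Float]()
//   val input  = Tensor[Float](2, 2).rand()
//   val output = exp.forward(input)                                   // element-wise e^x
//   val gradIn = exp.backward(input, Tensor[Float](2, 2).fill(1.0f))  // = output * gradOutput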
| jenniew/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Exp.scala | Scala | apache-2.0 | 1,426 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression
import org.eclipse.jdt.debug.core.IJavaBreakpoint
import org.eclipse.jface.viewers.StructuredSelection
import org.junit.Assert._
import org.junit.Test
import org.scalaide.debug.internal.ScalaDebugger
import org.scalaide.debug.internal.expression.TestValues.DifferentStackFramesTestCase
import org.scalaide.debug.internal.model.ScalaStackFrame
import org.scalaide.debug.internal.model.ScalaThread
class DifferentStackFramesTest extends BaseIntegrationTest(DifferentStackFramesTest) {
private def changeThread(name: String): Unit = {
val newThread = ScalaDebugger.currentThread.getDebugTarget.getThreads
.filter(_.getName == name).head.asInstanceOf[ScalaThread]
ScalaDebugger.updateCurrentThread(new StructuredSelection(newThread))
assertTrue(s"Thread $name is not suspended", ScalaDebugger.currentThread.isSuspended)
}
private def changeFrame(index: Int): Unit = {
val currentThread = ScalaDebugger.currentThread
val newFrame = ScalaStackFrame(currentThread, currentThread.threadRef.frame(index), index)
ScalaDebugger.updateCurrentThread(new StructuredSelection(newFrame))
}
@Test
def testFrameAccess(): Unit = {
/* Frames:
0: recFunction(0)
1: recFunction(1)
2: recFunction(2)
3: recFunction(3)
4: compute()
...
*/
eval("input", 0, Names.Java.primitives.int)
changeFrame(1)
eval("input", 1, Names.Java.primitives.int)
changeFrame(2)
eval("input", 2, Names.Java.primitives.int)
changeFrame(4)
eval("input", 5, Names.Java.primitives.int)
changeFrame(0)
eval("input", 0, Names.Java.primitives.int)
}
@Test
def testThreadAccess(): Unit = {
changeThread(DifferentStackFramesTestCase.demonThreadName)
ExpressionManager.compute("input") match {
case EvaluationFailure(errorMessage) => assertTrue(s"Error message differs, got: $errorMessage",
errorMessage.contains("is not suspended as a result of JDI event."))
case other => fail(s"Expected `not at breakpoint` message, got: $other")
}
changeThread(DifferentStackFramesTestCase.mainThread)
eval("input", 0, Names.Java.primitives.int)
companion.session.disconnect()
}
}
object DifferentStackFramesTest extends BaseIntegrationTestCompanion(DifferentStackFramesTestCase) {
override protected val suspensionPolicy: Int = IJavaBreakpoint.SUSPEND_VM
}
| stephenh/scala-ide | org.scala-ide.sdt.debug.expression.tests/src/org/scalaide/debug/internal/expression/DifferentStackFramesTest.scala | Scala | bsd-3-clause | 2,476 |
/**
* Copyright (C) 2015 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.stage.dataset
import java.io.File
import java.net.URI
import nl.knaw.dans.easy.stage.Settings
import nl.knaw.dans.easy.stage.lib.Util._
import nl.knaw.dans.lib.logging.DebugEnhancedLogging
import nl.knaw.dans.pf.language.ddm.api.Ddm2EmdCrosswalk
import nl.knaw.dans.pf.language.emd.EasyMetadata
import nl.knaw.dans.pf.language.emd.binding.EmdMarshaller
import nl.knaw.dans.pf.language.emd.types.{ BasicIdentifier, EmdArchive, EmdConstants }
import scala.util.{ Failure, Success, Try }
object EMD extends DebugEnhancedLogging {
def create(sdoDir: File, licenseAccepted: Option[Boolean])(implicit s: Settings): Try[EasyMetadata] = {
trace(sdoDir)
new File(s.bagitDir, "metadata/dataset.xml") match {
case file if file.exists() =>
for {
emd <- getEasyMetadata(file)
_ = if (s.skipPayload) addExtraDescription(emd)
_ = s.urn.foreach(urn => emd.getEmdIdentifier.add(wrapUrn(urn)))
_ = s.doi.foreach(doi => emd.getEmdIdentifier.add(wrapDoi(doi, s.otherAccessDoi)))
_ = emd.getEmdIdentifier.add(createDmoIdWithPlaceholder())
_ = emd.getEmdOther.getEasApplicationSpecific.setArchive(createEmdArchive(s.archive))
_ = licenseAccepted.foreach(emd.getEmdRights.setAcceptedLicense)
/*
* DO NOT USE getXmlString !! It will get the XML bytes and convert them to string using the
* platform's default Charset, which may not be what we expect.
*
* See https://drivenbydata.atlassian.net/browse/EASY-984
*/
_ <- writeEMD(sdoDir, new String(new EmdMarshaller(emd).getXmlByteArray, "UTF-8"))
} yield emd
      case _ => Failure(new RuntimeException("Couldn't find metadata/dataset.xml"))
}
}
private def addExtraDescription(emd: EasyMetadata)(implicit s: Settings): Try[Unit] = Try {
trace(emd)
s.extraDescription.foreach(d => emd.getEmdDescription.getDcDescription.add(new BasicIdentifier(d)))
}
private def getEasyMetadata(ddm: File): Try[EasyMetadata] = {
trace(ddm)
Try {
val crosswalk = new Ddm2EmdCrosswalk()
Option(crosswalk.createFrom(ddm))
.map(Success(_))
.getOrElse(Failure(new RuntimeException(s"${ crosswalk.getXmlErrorHandler.getMessages }")))
}.flatten
}
private def wrapUrn(urn: String): BasicIdentifier = {
trace(urn)
new BasicIdentifier(urn) {
setScheme(EmdConstants.SCHEME_PID)
setIdentificationSystem(new URI("http://www.persistent-identifier.nl"))
}
}
private def wrapDoi(doi: String, otherAccessDOI: Boolean): BasicIdentifier = {
trace(doi, otherAccessDOI)
new BasicIdentifier(doi) {
setScheme(if (otherAccessDOI) EmdConstants.SCHEME_DOI_OTHER_ACCESS
else EmdConstants.SCHEME_DOI)
setIdentificationSystem(new URI(EmdConstants.DOI_RESOLVER))
}
}
private def createDmoIdWithPlaceholder(): BasicIdentifier = {
trace(())
new BasicIdentifier("$sdo-id") {
setScheme(EmdConstants.SCHEME_DMO_ID)
}
}
private def createEmdArchive(archive: String): EmdArchive = {
trace(archive)
new EmdArchive() {
setLocation(EmdArchive.Location.valueOf(archive))
}
}
}
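// Illustrative call-site sketch (not part of the original file): `create` is driven by the
// implicit Settings (bag directory, urn/doi, archive location, ...), so a caller looks like:
//
//   implicit val settings: Settings = ???   // hypothetical, assembled elsewhere
//   val emdTry = EMD.create(sdoDir = new File("out/sdo-001"), licenseAccepted = Some(true))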
| DANS-KNAW/easy-stage-dataset | lib/src/main/scala/nl.knaw.dans.easy.stage/dataset/EMD.scala | Scala | apache-2.0 | 3,888 |
package moe.pizza.auth.plugins.pilotgraders
import moe.pizza.auth.models.Pilot
import moe.pizza.auth.plugins.pilotgraders.AlliedPilotGrader.SavedContactList
import moe.pizza.eveapi.endpoints.Corp
import moe.pizza.eveapi.generated.corp.ContactList.{Row, Rowset}
import moe.pizza.eveapi.{ApiKey, EVEAPI, XMLApiResponse}
import org.joda.time.DateTime
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}
import org.http4s.client.blaze.PooledHttp1Client
import scalaz.concurrent.Task
import scalaxb.{DataRecord, XMLStandardTypes}
import scalaz.concurrent.Task
/**
* Created by Andi on 26/02/2016.
*/
class AlliedPilotGraderSpec
extends WordSpec
with MustMatchers
with MockitoSugar
with XMLStandardTypes {
implicit val client = PooledHttp1Client() //TODO: no mocking?
"AlliedPilotGrader" when {
"pulling contacts" should {
"cope with failure when reading a corp contact list" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
throw new Exception("oh no")
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(None)
}
"read a corp contact list" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map(
"@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(
Some(new SavedContactList(now, List(), List("Terry"), List())))
}
"read an alliance contact list" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(
Some(new SavedContactList(now, List(), List("Terry"), List())))
}
}
"merge the lists" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(
Some(
new SavedContactList(now, List(), List("Terry2", "Terry"), List())))
}
"parse people, corps and alliance into the correct lists" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(
Some(
new SavedContactList(now,
List("Terry2", "Terry"),
List("TerryCorp"),
List("Terry's Cool Alliance"))))
}
"obey the flags for which lists to parse" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0,
true,
usealliance = false,
Some(eveapi),
null)
val r = apg.pullAllies()
r must equal(
Some(new SavedContactList(now, List(), List("TerryCorp"), List())))
}
"obey the threshold" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
now,
now,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(2))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(7.2))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(-10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(
Some(
new SavedContactList(now,
List("Terry2"),
List(),
List("Terry's Cool Alliance"))))
}
}
"grading pilots" should {
"grade allied pilots as allies" in {
val now = DateTime.now()
val expiry = DateTime.now().plusHours(2)
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
expiry,
expiry,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val bob = new Pilot("bob",
Pilot.Status.unclassified,
"boballiance",
"bobcorp",
"Bob",
"none@none",
Pilot.OM.createObjectNode(),
List.empty[String],
List("1:REF"),
List.empty[String])
apg.grade(bob) must equal(Pilot.Status.unclassified)
apg.grade(bob.copy(characterName = "Terry")) must equal(
Pilot.Status.ally)
apg.grade(bob.copy(corporation = "TerryCorp")) must equal(
Pilot.Status.ally)
apg.grade(bob.copy(alliance = "Terry's Cool Alliance")) must equal(
Pilot.Status.ally)
verify(corp, times(1)).ContactList()
}
"pull a new contact list if the old one expired" in {
val now = DateTime.now()
val expiry = DateTime.now().plusHours(2)
val expired = DateTime.now().minusHours(2)
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
expired,
expired,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
expiry,
expiry,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null,
BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
val bob = new Pilot("bob",
Pilot.Status.unclassified,
"boballiance",
"bobcorp",
"Bob",
"none@none",
Pilot.OM.createObjectNode(),
List.empty[String],
List("1:REF"),
List.empty[String])
apg.grade(bob) must equal(Pilot.Status.unclassified)
verify(corp, times(2)).ContactList()
}
}
"cope with failure when there's no contact list" in {
val now = DateTime.now()
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
throw new Exception("oh no")
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
val r = apg.pullAllies()
r must equal(None)
val bob = new Pilot("bob",
Pilot.Status.unclassified,
"boballiance",
"bobcorp",
"Bob",
"none@none",
Pilot.OM.createObjectNode(),
List.empty[String],
List("1:REF"),
List.empty[String])
apg.grade(bob) must equal(Pilot.Status.unclassified)
verify(corp, times(2)).ContactList()
}
"classify with old data if required" in {
val now = DateTime.now()
val expiry = DateTime.now().plusHours(2)
val expired = DateTime.now().minusHours(2)
val eveapi = mock[EVEAPI]
val corp = mock[Corp]
when(eveapi.corp).thenReturn(corp)
when(corp.ContactList()).thenReturn(
Task {
new XMLApiResponse(
expired,
expired,
Seq(
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1373)),
"@contactName" -> DataRecord.apply(null, "Terry"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(1375)),
"@contactName" -> DataRecord.apply(null, "Terry2"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
),
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(16159)),
"@contactName" -> DataRecord
.apply(null, "Terry's Cool Alliance"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "allianceContactList"))
),
new Rowset(
Seq(
new Row(
Map(
"@contactTypeID" -> DataRecord.apply(null, BigInt(2)),
"@contactName" -> DataRecord.apply(null, "TerryCorp"),
"@standing" -> DataRecord.apply(null, BigDecimal(10))
)
)
),
Map("@name" -> DataRecord.apply(null, "corporateContactList"))
)
)
)
}
)
implicit val apikey = new ApiKey(1, "hi")
val apg = new AlliedPilotGrader(5.0, true, true, Some(eveapi), null)
when(corp.ContactList()).thenReturn(
Task {
throw new Exception("oh no")
}
)
val bob = new Pilot("bob",
Pilot.Status.unclassified,
"boballiance",
"bobcorp",
"Bob",
"none@none",
Pilot.OM.createObjectNode(),
List.empty[String],
List("1:REF"),
List.empty[String])
apg.grade(bob) must equal(Pilot.Status.unclassified)
apg.grade(bob.copy(characterName = "Terry")) must equal(Pilot.Status.ally)
apg.grade(bob.copy(corporation = "TerryCorp")) must equal(
Pilot.Status.ally)
apg.grade(bob.copy(alliance = "Terry's Cool Alliance")) must equal(
Pilot.Status.ally)
verify(corp, times(5)).ContactList()
}
}
| xxpizzaxx/pizza-auth-3 | src/test/scala/moe/pizza/auth/plugins/pilotgraders/AlliedPilotGraderSpec.scala | Scala | mit | 24,653 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.knockdata.spark.highcharts.plotoptions
import com.knockdata.spark.highcharts.AbstractTestCase
import org.junit.Test
class TestPlotOptions extends AbstractTestCase{
@Test
def testCodeInPlot() = {
val options = new Area()//.fillColor("linearGradient", Map("x1"->0,"y1"->0, "x2"->0, "y2"->1))
.fillColorStops((0, "Highcharts.getOptions().colors[0]"),
(1, "Highcharts.Color(Highcharts.getOptions().colors[0]).setOpacity(0).get('rgba')"))
val expected =
"""
|{
| "fillColor":{
| "stops":[[0,"--code-FEA24034CBC777B5F8EC1B3125E2BFC7--"],[1,"--code-7619821E4C9B6D7CA8090F17507C6024--"]]
| }
|}
""".stripMargin
assertEqualJson(expected, options.json)
}
}
| knockdata/spark-highcharts | src/test/scala/com/knockdata/spark/highcharts/plotoptions/TestPlotOptions.scala | Scala | apache-2.0 | 1,538 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rpc.netty
import java.io._
import java.net.{InetSocketAddress, URI}
import java.nio.ByteBuffer
import java.nio.channels.{Pipe, ReadableByteChannel, WritableByteChannel}
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
import javax.annotation.Nullable
import scala.concurrent.{Future, Promise}
import scala.reflect.ClassTag
import scala.util.{DynamicVariable, Failure, Success, Try}
import scala.util.control.NonFatal
import org.apache.spark.{SecurityManager, SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.EXECUTOR_ID
import org.apache.spark.internal.config.Network._
import org.apache.spark.network.TransportContext
import org.apache.spark.network.client._
import org.apache.spark.network.crypto.{AuthClientBootstrap, AuthServerBootstrap}
import org.apache.spark.network.netty.SparkTransportConf
import org.apache.spark.network.server._
import org.apache.spark.rpc._
import org.apache.spark.serializer.{JavaSerializer, JavaSerializerInstance, SerializationStream}
import org.apache.spark.util.{ByteBufferInputStream, ByteBufferOutputStream, ThreadUtils, Utils}
private[netty] class NettyRpcEnv(
val conf: SparkConf,
javaSerializerInstance: JavaSerializerInstance,
host: String,
securityManager: SecurityManager,
numUsableCores: Int) extends RpcEnv(conf) with Logging {
val role = conf.get(EXECUTOR_ID).map { id =>
if (id == SparkContext.DRIVER_IDENTIFIER) "driver" else "executor"
}
private[netty] val transportConf = SparkTransportConf.fromSparkConf(
conf.clone.set(RPC_IO_NUM_CONNECTIONS_PER_PEER, 1),
"rpc",
conf.get(RPC_IO_THREADS).getOrElse(numUsableCores),
role)
private val dispatcher: Dispatcher = new Dispatcher(this, numUsableCores)
private val streamManager = new NettyStreamManager(this)
private val transportContext = new TransportContext(transportConf,
new NettyRpcHandler(dispatcher, this, streamManager))
private def createClientBootstraps(): java.util.List[TransportClientBootstrap] = {
if (securityManager.isAuthenticationEnabled()) {
java.util.Arrays.asList(new AuthClientBootstrap(transportConf,
securityManager.getSaslUser(), securityManager))
} else {
java.util.Collections.emptyList[TransportClientBootstrap]
}
}
private val clientFactory = transportContext.createClientFactory(createClientBootstraps())
/**
* A separate client factory for file downloads. This avoids using the same RPC handler as
* the main RPC context, so that events caused by these clients are kept isolated from the
* main RPC traffic.
*
* It also allows for different configuration of certain properties, such as the number of
* connections per peer.
*/
@volatile private var fileDownloadFactory: TransportClientFactory = _
val timeoutScheduler = ThreadUtils.newDaemonSingleThreadScheduledExecutor("netty-rpc-env-timeout")
// Because TransportClientFactory.createClient is blocking, we need to run it in this thread pool
// to implement non-blocking send/ask.
// TODO: a non-blocking TransportClientFactory.createClient in future
private[netty] val clientConnectionExecutor = ThreadUtils.newDaemonCachedThreadPool(
"netty-rpc-connection",
conf.get(RPC_CONNECT_THREADS))
@volatile private var server: TransportServer = _
private val stopped = new AtomicBoolean(false)
/**
   * A map from [[RpcAddress]] to [[Outbox]]. When we are connecting to a remote [[RpcAddress]],
* we just put messages to its [[Outbox]] to implement a non-blocking `send` method.
*/
private val outboxes = new ConcurrentHashMap[RpcAddress, Outbox]()
/**
* Remove the address's Outbox and stop it.
*/
private[netty] def removeOutbox(address: RpcAddress): Unit = {
val outbox = outboxes.remove(address)
if (outbox != null) {
outbox.stop()
}
}
def startServer(bindAddress: String, port: Int): Unit = {
val bootstraps: java.util.List[TransportServerBootstrap] =
if (securityManager.isAuthenticationEnabled()) {
java.util.Arrays.asList(new AuthServerBootstrap(transportConf, securityManager))
} else {
java.util.Collections.emptyList()
}
server = transportContext.createServer(bindAddress, port, bootstraps)
dispatcher.registerRpcEndpoint(
RpcEndpointVerifier.NAME, new RpcEndpointVerifier(this, dispatcher))
}
@Nullable
override lazy val address: RpcAddress = {
if (server != null) RpcAddress(host, server.getPort()) else null
}
override def setupEndpoint(name: String, endpoint: RpcEndpoint): RpcEndpointRef = {
dispatcher.registerRpcEndpoint(name, endpoint)
}
def asyncSetupEndpointRefByURI(uri: String): Future[RpcEndpointRef] = {
val addr = RpcEndpointAddress(uri)
val endpointRef = new NettyRpcEndpointRef(conf, addr, this)
val verifier = new NettyRpcEndpointRef(
conf, RpcEndpointAddress(addr.rpcAddress, RpcEndpointVerifier.NAME), this)
verifier.ask[Boolean](RpcEndpointVerifier.CheckExistence(endpointRef.name)).flatMap { find =>
if (find) {
Future.successful(endpointRef)
} else {
Future.failed(new RpcEndpointNotFoundException(uri))
}
}(ThreadUtils.sameThread)
}
override def stop(endpointRef: RpcEndpointRef): Unit = {
require(endpointRef.isInstanceOf[NettyRpcEndpointRef])
dispatcher.stop(endpointRef)
}
private def postToOutbox(receiver: NettyRpcEndpointRef, message: OutboxMessage): Unit = {
if (receiver.client != null) {
message.sendWith(receiver.client)
} else {
require(receiver.address != null,
"Cannot send message to client endpoint with no listen address.")
val targetOutbox = {
val outbox = outboxes.get(receiver.address)
if (outbox == null) {
val newOutbox = new Outbox(this, receiver.address)
val oldOutbox = outboxes.putIfAbsent(receiver.address, newOutbox)
if (oldOutbox == null) {
newOutbox
} else {
oldOutbox
}
} else {
outbox
}
}
if (stopped.get) {
// It's possible that we put `targetOutbox` after stopping. So we need to clean it.
outboxes.remove(receiver.address)
targetOutbox.stop()
} else {
targetOutbox.send(message)
}
}
}
private[netty] def send(message: RequestMessage): Unit = {
val remoteAddr = message.receiver.address
if (remoteAddr == address) {
// Message to a local RPC endpoint.
try {
dispatcher.postOneWayMessage(message)
} catch {
case e: RpcEnvStoppedException => logDebug(e.getMessage)
}
} else {
// Message to a remote RPC endpoint.
postToOutbox(message.receiver, OneWayOutboxMessage(message.serialize(this)))
}
}
private[netty] def createClient(address: RpcAddress): TransportClient = {
clientFactory.createClient(address.host, address.port)
}
private[netty] def askAbortable[T: ClassTag](
message: RequestMessage, timeout: RpcTimeout): AbortableRpcFuture[T] = {
val promise = Promise[Any]()
val remoteAddr = message.receiver.address
var rpcMsg: Option[RpcOutboxMessage] = None
def onFailure(e: Throwable): Unit = {
if (!promise.tryFailure(e)) {
e match {
          case e: RpcEnvStoppedException => logDebug(s"Ignored failure: $e")
case _ => logWarning(s"Ignored failure: $e")
}
}
}
def onSuccess(reply: Any): Unit = reply match {
case RpcFailure(e) => onFailure(e)
case rpcReply =>
if (!promise.trySuccess(rpcReply)) {
logWarning(s"Ignored message: $reply")
}
}
def onAbort(t: Throwable): Unit = {
onFailure(t)
rpcMsg.foreach(_.onAbort())
}
try {
if (remoteAddr == address) {
val p = Promise[Any]()
p.future.onComplete {
case Success(response) => onSuccess(response)
case Failure(e) => onFailure(e)
}(ThreadUtils.sameThread)
dispatcher.postLocalMessage(message, p)
} else {
val rpcMessage = RpcOutboxMessage(message.serialize(this),
onFailure,
(client, response) => onSuccess(deserialize[Any](client, response)))
rpcMsg = Option(rpcMessage)
postToOutbox(message.receiver, rpcMessage)
promise.future.failed.foreach {
case _: TimeoutException => rpcMessage.onTimeout()
case _ =>
}(ThreadUtils.sameThread)
}
val timeoutCancelable = timeoutScheduler.schedule(new Runnable {
override def run(): Unit = {
val remoteReceAddr = if (remoteAddr == null) {
Try {
message.receiver.client.getChannel.remoteAddress()
}.toOption.orNull
} else {
remoteAddr
}
onFailure(new TimeoutException(s"Cannot receive any reply from ${remoteReceAddr} " +
s"in ${timeout.duration}"))
}
}, timeout.duration.toNanos, TimeUnit.NANOSECONDS)
promise.future.onComplete { v =>
timeoutCancelable.cancel(true)
}(ThreadUtils.sameThread)
} catch {
case NonFatal(e) =>
onFailure(e)
}
new AbortableRpcFuture[T](
promise.future.mapTo[T].recover(timeout.addMessageIfTimeout)(ThreadUtils.sameThread),
onAbort)
}
private[netty] def ask[T: ClassTag](message: RequestMessage, timeout: RpcTimeout): Future[T] = {
askAbortable(message, timeout).future
}
private[netty] def serialize(content: Any): ByteBuffer = {
javaSerializerInstance.serialize(content)
}
/**
* Returns [[SerializationStream]] that forwards the serialized bytes to `out`.
*/
private[netty] def serializeStream(out: OutputStream): SerializationStream = {
javaSerializerInstance.serializeStream(out)
}
private[netty] def deserialize[T: ClassTag](client: TransportClient, bytes: ByteBuffer): T = {
NettyRpcEnv.currentClient.withValue(client) {
deserialize { () =>
javaSerializerInstance.deserialize[T](bytes)
}
}
}
override def endpointRef(endpoint: RpcEndpoint): RpcEndpointRef = {
dispatcher.getRpcEndpointRef(endpoint)
}
override def shutdown(): Unit = {
cleanup()
}
override def awaitTermination(): Unit = {
dispatcher.awaitTermination()
}
private def cleanup(): Unit = {
if (!stopped.compareAndSet(false, true)) {
return
}
val iter = outboxes.values().iterator()
while (iter.hasNext()) {
val outbox = iter.next()
outboxes.remove(outbox.address)
outbox.stop()
}
if (timeoutScheduler != null) {
timeoutScheduler.shutdownNow()
}
if (dispatcher != null) {
dispatcher.stop()
}
if (server != null) {
server.close()
}
if (clientFactory != null) {
clientFactory.close()
}
if (clientConnectionExecutor != null) {
clientConnectionExecutor.shutdownNow()
}
if (fileDownloadFactory != null) {
fileDownloadFactory.close()
}
if (transportContext != null) {
transportContext.close()
}
}
override def deserialize[T](deserializationAction: () => T): T = {
NettyRpcEnv.currentEnv.withValue(this) {
deserializationAction()
}
}
override def fileServer: RpcEnvFileServer = streamManager
override def openChannel(uri: String): ReadableByteChannel = {
val parsedUri = new URI(uri)
require(parsedUri.getHost() != null, "Host name must be defined.")
require(parsedUri.getPort() > 0, "Port must be defined.")
require(parsedUri.getPath() != null && parsedUri.getPath().nonEmpty, "Path must be defined.")
val pipe = Pipe.open()
val source = new FileDownloadChannel(pipe.source())
Utils.tryWithSafeFinallyAndFailureCallbacks(block = {
val client = downloadClient(parsedUri.getHost(), parsedUri.getPort())
val callback = new FileDownloadCallback(pipe.sink(), source, client)
client.stream(parsedUri.getPath(), callback)
})(catchBlock = {
pipe.sink().close()
source.close()
})
source
}
private def downloadClient(host: String, port: Int): TransportClient = {
if (fileDownloadFactory == null) synchronized {
if (fileDownloadFactory == null) {
val module = "files"
val prefix = "spark.rpc.io."
val clone = conf.clone()
// Copy any RPC configuration that is not overridden in the spark.files namespace.
conf.getAll.foreach { case (key, value) =>
if (key.startsWith(prefix)) {
val opt = key.substring(prefix.length())
clone.setIfMissing(s"spark.$module.io.$opt", value)
}
}
val ioThreads = clone.getInt("spark.files.io.threads", 1)
val downloadConf = SparkTransportConf.fromSparkConf(clone, module, ioThreads)
val downloadContext = new TransportContext(downloadConf, new NoOpRpcHandler(), true)
fileDownloadFactory = downloadContext.createClientFactory(createClientBootstraps())
}
}
fileDownloadFactory.createClient(host, port)
}
private class FileDownloadChannel(source: Pipe.SourceChannel) extends ReadableByteChannel {
@volatile private var error: Throwable = _
def setError(e: Throwable): Unit = {
// This setError callback is invoked by internal RPC threads in order to propagate remote
// exceptions to application-level threads which are reading from this channel. When an
// RPC error occurs, the RPC system will call setError() and then will close the
// Pipe.SinkChannel corresponding to the other end of the `source` pipe. Closing of the pipe
// sink will cause `source.read()` operations to return EOF, unblocking the application-level
// reading thread. Thus there is no need to actually call `source.close()` here in the
// onError() callback and, in fact, calling it here would be dangerous because the close()
// would be asynchronous with respect to the read() call and could trigger race-conditions
// that lead to data corruption. See the PR for SPARK-22982 for more details on this topic.
error = e
}
override def read(dst: ByteBuffer): Int = {
Try(source.read(dst)) match {
// See the documentation above in setError(): if an RPC error has occurred then setError()
// will be called to propagate the RPC error and then `source`'s corresponding
// Pipe.SinkChannel will be closed, unblocking this read. In that case, we want to propagate
// the remote RPC exception (and not any exceptions triggered by the pipe close, such as
// ChannelClosedException), hence this `error != null` check:
case _ if error != null => throw error
case Success(bytesRead) => bytesRead
case Failure(readErr) => throw readErr
}
}
override def close(): Unit = source.close()
override def isOpen(): Boolean = source.isOpen()
}
private class FileDownloadCallback(
sink: WritableByteChannel,
source: FileDownloadChannel,
client: TransportClient) extends StreamCallback {
override def onData(streamId: String, buf: ByteBuffer): Unit = {
while (buf.remaining() > 0) {
sink.write(buf)
}
}
override def onComplete(streamId: String): Unit = {
sink.close()
}
override def onFailure(streamId: String, cause: Throwable): Unit = {
logDebug(s"Error downloading stream $streamId.", cause)
source.setError(cause)
sink.close()
}
}
}
private[netty] object NettyRpcEnv extends Logging {
/**
* When deserializing the [[NettyRpcEndpointRef]], it needs a reference to [[NettyRpcEnv]].
* Use `currentEnv` to wrap the deserialization codes. E.g.,
*
* {{{
* NettyRpcEnv.currentEnv.withValue(this) {
* your deserialization codes
* }
* }}}
*/
private[netty] val currentEnv = new DynamicVariable[NettyRpcEnv](null)
/**
* Similar to `currentEnv`, this variable references the client instance associated with an
* RPC, in case it's needed to find out the remote address during deserialization.
*/
private[netty] val currentClient = new DynamicVariable[TransportClient](null)
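  // A minimal sketch of how the two variables are combined (mirroring
  // NettyRpcEnv.deserialize above): the client is bound so that a
  // NettyRpcEndpointRef read back from the wire can capture the sending
  // TransportClient, and the env is bound so the ref is wired to this RpcEnv.
  //
  //   NettyRpcEnv.currentClient.withValue(client) {
  //     NettyRpcEnv.currentEnv.withValue(env) {
  //       javaSerializerInstance.deserialize[T](bytes)
  //     }
  //   }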
}
private[rpc] class NettyRpcEnvFactory extends RpcEnvFactory with Logging {
def create(config: RpcEnvConfig): RpcEnv = {
val sparkConf = config.conf
    // Using JavaSerializerInstance in multiple threads is safe. However, if we plan to support
    // KryoSerializer in the future, we have to use ThreadLocal to store SerializerInstance
val javaSerializerInstance =
new JavaSerializer(sparkConf).newInstance().asInstanceOf[JavaSerializerInstance]
val nettyEnv =
new NettyRpcEnv(sparkConf, javaSerializerInstance, config.advertiseAddress,
config.securityManager, config.numUsableCores)
if (!config.clientMode) {
val startNettyRpcEnv: Int => (NettyRpcEnv, Int) = { actualPort =>
nettyEnv.startServer(config.bindAddress, actualPort)
(nettyEnv, nettyEnv.address.port)
}
try {
Utils.startServiceOnPort(config.port, startNettyRpcEnv, sparkConf, config.name)._1
} catch {
case NonFatal(e) =>
nettyEnv.shutdown()
throw e
}
}
nettyEnv
}
}
/**
* The NettyRpcEnv version of RpcEndpointRef.
*
* This class behaves differently depending on where it's created. On the node that "owns" the
* RpcEndpoint, it's a simple wrapper around the RpcEndpointAddress instance.
*
* On other machines that receive a serialized version of the reference, the behavior changes. The
* instance will keep track of the TransportClient that sent the reference, so that messages
* to the endpoint are sent over the client connection, instead of needing a new connection to
* be opened.
*
* The RpcAddress of this ref can be null; what that means is that the ref can only be used through
* a client connection, since the process hosting the endpoint is not listening for incoming
* connections. These refs should not be shared with 3rd parties, since they will not be able to
* send messages to the endpoint.
*
* @param conf Spark configuration.
* @param endpointAddress The address where the endpoint is listening.
* @param nettyEnv The RpcEnv associated with this ref.
*/
private[netty] class NettyRpcEndpointRef(
@transient private val conf: SparkConf,
private val endpointAddress: RpcEndpointAddress,
@transient @volatile private var nettyEnv: NettyRpcEnv) extends RpcEndpointRef(conf) {
@transient @volatile var client: TransportClient = _
override def address: RpcAddress =
if (endpointAddress.rpcAddress != null) endpointAddress.rpcAddress else null
private def readObject(in: ObjectInputStream): Unit = {
in.defaultReadObject()
nettyEnv = NettyRpcEnv.currentEnv.value
client = NettyRpcEnv.currentClient.value
}
private def writeObject(out: ObjectOutputStream): Unit = {
out.defaultWriteObject()
}
override def name: String = endpointAddress.name
override def askAbortable[T: ClassTag](
message: Any, timeout: RpcTimeout): AbortableRpcFuture[T] = {
nettyEnv.askAbortable(new RequestMessage(nettyEnv.address, this, message), timeout)
}
override def ask[T: ClassTag](message: Any, timeout: RpcTimeout): Future[T] = {
askAbortable(message, timeout).future
}
override def send(message: Any): Unit = {
require(message != null, "Message is null")
nettyEnv.send(new RequestMessage(nettyEnv.address, this, message))
}
override def toString: String = s"NettyRpcEndpointRef(${endpointAddress})"
final override def equals(that: Any): Boolean = that match {
case other: NettyRpcEndpointRef => endpointAddress == other.endpointAddress
case _ => false
}
final override def hashCode(): Int =
if (endpointAddress == null) 0 else endpointAddress.hashCode()
}
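// A minimal usage sketch (the message values and timeout are illustrative):
// a ref is either created locally via RpcEnv.setupEndpoint(...) or arrives
// embedded in a deserialized message; callers then fire-and-forget with
// `send` or wait for an answer with `ask`:
//   ref.send("ping")                                     // one-way message
//   val pong: Future[String] = ref.ask[String]("ping", timeout)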
/**
* The message that is sent from the sender to the receiver.
*
* @param senderAddress the sender address. It's `null` if this message is from a client
* `NettyRpcEnv`.
* @param receiver the receiver of this message.
* @param content the message content.
*/
private[netty] class RequestMessage(
val senderAddress: RpcAddress,
val receiver: NettyRpcEndpointRef,
val content: Any) {
/** Manually serialize [[RequestMessage]] to minimize the size. */
def serialize(nettyEnv: NettyRpcEnv): ByteBuffer = {
val bos = new ByteBufferOutputStream()
val out = new DataOutputStream(bos)
try {
writeRpcAddress(out, senderAddress)
writeRpcAddress(out, receiver.address)
out.writeUTF(receiver.name)
val s = nettyEnv.serializeStream(out)
try {
s.writeObject(content)
} finally {
s.close()
}
} finally {
out.close()
}
bos.toByteBuffer
}
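  // The resulting layout is: [optional sender address: boolean + host + port]
  // [optional receiver address: boolean + host + port][receiver name as UTF]
  // [content written by the env's Java serializer]; RequestMessage.apply
  // below reads the fields back in the same order.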
private def writeRpcAddress(out: DataOutputStream, rpcAddress: RpcAddress): Unit = {
if (rpcAddress == null) {
out.writeBoolean(false)
} else {
out.writeBoolean(true)
out.writeUTF(rpcAddress.host)
out.writeInt(rpcAddress.port)
}
}
override def toString: String = s"RequestMessage($senderAddress, $receiver, $content)"
}
private[netty] object RequestMessage {
private def readRpcAddress(in: DataInputStream): RpcAddress = {
val hasRpcAddress = in.readBoolean()
if (hasRpcAddress) {
RpcAddress(in.readUTF(), in.readInt())
} else {
null
}
}
def apply(nettyEnv: NettyRpcEnv, client: TransportClient, bytes: ByteBuffer): RequestMessage = {
val bis = new ByteBufferInputStream(bytes)
val in = new DataInputStream(bis)
try {
val senderAddress = readRpcAddress(in)
val endpointAddress = RpcEndpointAddress(readRpcAddress(in), in.readUTF())
val ref = new NettyRpcEndpointRef(nettyEnv.conf, endpointAddress, nettyEnv)
ref.client = client
new RequestMessage(
senderAddress,
ref,
// The remaining bytes in `bytes` are the message content.
nettyEnv.deserialize(client, bytes))
} finally {
in.close()
}
}
}
/**
* A response that indicates some failure happens in the receiver side.
*/
private[netty] case class RpcFailure(e: Throwable)
/**
* Dispatches incoming RPCs to registered endpoints.
*
* The handler keeps track of all client instances that communicate with it, so that the RpcEnv
* knows which `TransportClient` instance to use when sending RPCs to a client endpoint (i.e.,
* one that is not listening for incoming connections, but rather needs to be contacted via the
* client socket).
*
* Events are sent on a per-connection basis, so if a client opens multiple connections to the
* RpcEnv, multiple connection / disconnection events will be created for that client (albeit
* with different `RpcAddress` information).
*/
private[netty] class NettyRpcHandler(
dispatcher: Dispatcher,
nettyEnv: NettyRpcEnv,
streamManager: StreamManager) extends RpcHandler with Logging {
// A variable to track the remote RpcEnv addresses of all clients
private val remoteAddresses = new ConcurrentHashMap[RpcAddress, RpcAddress]()
override def receive(
client: TransportClient,
message: ByteBuffer,
callback: RpcResponseCallback): Unit = {
val messageToDispatch = internalReceive(client, message)
dispatcher.postRemoteMessage(messageToDispatch, callback)
}
override def receive(
client: TransportClient,
message: ByteBuffer): Unit = {
val messageToDispatch = internalReceive(client, message)
dispatcher.postOneWayMessage(messageToDispatch)
}
private def internalReceive(client: TransportClient, message: ByteBuffer): RequestMessage = {
val addr = client.getChannel().remoteAddress().asInstanceOf[InetSocketAddress]
assert(addr != null)
val clientAddr = RpcAddress(addr.getHostString, addr.getPort)
val requestMessage = RequestMessage(nettyEnv, client, message)
if (requestMessage.senderAddress == null) {
// Create a new message with the socket address of the client as the sender.
new RequestMessage(clientAddr, requestMessage.receiver, requestMessage.content)
} else {
      // The remote RpcEnv listens on some port, so we should also fire a
      // RemoteProcessConnected for the listening address
val remoteEnvAddress = requestMessage.senderAddress
if (remoteAddresses.putIfAbsent(clientAddr, remoteEnvAddress) == null) {
dispatcher.postToAll(RemoteProcessConnected(remoteEnvAddress))
}
requestMessage
}
}
override def getStreamManager: StreamManager = streamManager
override def exceptionCaught(cause: Throwable, client: TransportClient): Unit = {
val addr = client.getChannel.remoteAddress().asInstanceOf[InetSocketAddress]
if (addr != null) {
val clientAddr = RpcAddress(addr.getHostString, addr.getPort)
dispatcher.postToAll(RemoteProcessConnectionError(cause, clientAddr))
      // If the remote RpcEnv listens on some address, we should also fire a
      // RemoteProcessConnectionError for the remote RpcEnv listening address
val remoteEnvAddress = remoteAddresses.get(clientAddr)
if (remoteEnvAddress != null) {
dispatcher.postToAll(RemoteProcessConnectionError(cause, remoteEnvAddress))
}
} else {
// If the channel is closed before connecting, its remoteAddress will be null.
// See java.net.Socket.getRemoteSocketAddress
// Because we cannot get a RpcAddress, just log it
logError("Exception before connecting to the client", cause)
}
}
override def channelActive(client: TransportClient): Unit = {
val addr = client.getChannel().remoteAddress().asInstanceOf[InetSocketAddress]
assert(addr != null)
val clientAddr = RpcAddress(addr.getHostString, addr.getPort)
dispatcher.postToAll(RemoteProcessConnected(clientAddr))
}
override def channelInactive(client: TransportClient): Unit = {
val addr = client.getChannel.remoteAddress().asInstanceOf[InetSocketAddress]
if (addr != null) {
val clientAddr = RpcAddress(addr.getHostString, addr.getPort)
nettyEnv.removeOutbox(clientAddr)
dispatcher.postToAll(RemoteProcessDisconnected(clientAddr))
val remoteEnvAddress = remoteAddresses.remove(clientAddr)
      // If the remote RpcEnv listens on some address, we should also fire a
      // RemoteProcessDisconnected for the remote RpcEnv listening address
if (remoteEnvAddress != null) {
dispatcher.postToAll(RemoteProcessDisconnected(remoteEnvAddress))
}
} else {
// If the channel is closed before connecting, its remoteAddress will be null. In this case,
// we can ignore it since we don't fire "Associated".
// See java.net.Socket.getRemoteSocketAddress
}
}
}
| spark-test/spark | core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala | Scala | apache-2.0 | 27,725 |
package co.spendabit.webapp.forms.v3.controls
import co.spendabit.webapp.forms.util.withAttr
object Checkbox extends TextBasedInput[Boolean] {
override def html(value: Option[Boolean] = None): xml.NodeSeq = {
// val cb = <input type="checkbox" name={ name } />
val cb = <input type="checkbox" />
if (value.contains(true)) withAttr(cb, "checked", "checked") else cb
}
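  // Browsers submit the value "on" for a checked checkbox and omit the
  // parameter entirely when it is unchecked, hence the simple equality test.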
def validate(s: String): Either[String, Boolean] =
Right(s == "on")
// def validate(params: Map[String, Seq[String]]): Either[String, Boolean] =
// Right(
// params.get(name) match {
// case Some(Seq("on")) => true
// case _ => false
// })
}
| spendabit/webapp-tools | src/co/spendabit/webapp/forms/v3/controls/Checkbox.scala | Scala | unlicense | 676 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.udfpredictor
import java.io.{File, InputStream, PrintWriter}
import com.intel.analytics.bigdl.example.utils.WordMeta
import com.intel.analytics.bigdl.example.utils.TextClassifier
import com.intel.analytics.bigdl.models.utils.ModelBroadcast
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.nn.Module
import org.apache.spark.SparkContext
import scala.io.Source
import scopt.OptionParser
object Utils {
type Model = AbstractModule[Activity, Activity, Float]
type Word2Meta = Map[String, WordMeta]
type Word2Index = Map[String, Int]
type Word2Vec = Map[Float, Array[Float]]
type SampleShape = Array[Int]
type TFP = TextClassificationUDFParams
case class Sample(filename: String, text: String)
private var textClassification: TextClassifier = null
def getTextClassifier(param: TFP): TextClassifier = {
if (textClassification == null) {
textClassification = new TextClassifier(param)
}
textClassification
}
def getModel(sc: SparkContext, param: TFP): (Model, Option[Word2Meta],
Option[Word2Vec], SampleShape) = {
val textClassification = getTextClassifier(param)
if (param.modelPath.isDefined) {
(Module.load[Float](param.modelPath.get),
None,
None,
Array(param.maxSequenceLength, param.embeddingDim))
} else {
// get train and validation rdds
val (rdds, word2Meta, word2Vec) = textClassification.getData(sc)
      // save word2Meta so that vectors can be generated later
val word2Index = word2Meta.mapValues[Int]((wordMeta: WordMeta) => wordMeta.index)
sc.parallelize(word2Index.toSeq).saveAsTextFile(s"${param.baseDir}/word2Meta.txt")
// train
val trainedModel = textClassification.trainFromData(sc, rdds)
// after training, save model
if (param.checkpoint.isDefined) {
trainedModel.save(s"${param.checkpoint.get}/model.1", overWrite = true)
}
(trainedModel.evaluate(),
Some(word2Meta),
Some(word2Vec),
Array(param.maxSequenceLength, param.embeddingDim))
}
}
def getWord2Vec(word2Index: Map[String, Int]): Map[Float, Array[Float]] = {
val word2Vec = textClassification.buildWord2VecWithIndex(word2Index)
word2Vec
}
def genUdf(sc: SparkContext,
model: Model,
sampleShape: Array[Int],
word2Index: Word2Index,
word2Vec: Word2Vec)
(implicit ev: TensorNumeric[Float]): (String) => Int = {
val broadcastModel = ModelBroadcast[Float].broadcast(sc, model)
val word2IndexBC = sc.broadcast(word2Index)
val word2VecBC = sc.broadcast(word2Vec)
val udf = (text: String) => {
val sequenceLen = sampleShape(0)
val embeddingDim = sampleShape(1)
val word2Meta = word2IndexBC.value
val word2Vec = word2VecBC.value
      // first, split the text into tokens
val tokens = text.replaceAll("[^a-zA-Z]", " ")
.toLowerCase().split("\\s+").filter(_.length > 2).map { word: String =>
if (word2Meta.contains(word)) {
Some(word2Meta(word).toFloat)
} else {
None
}
}.flatten
      // shape to the fixed sequence length: truncate or zero-pad
val paddedTokens = if (tokens.length > sequenceLen) {
tokens.slice(tokens.length - sequenceLen, tokens.length)
} else {
tokens ++ Array.fill[Float](sequenceLen - tokens.length)(0)
}
val data = paddedTokens.map { word: Float =>
if (word2Vec.contains(word)) {
word2Vec(word)
} else {
          // Treat it as zeros if it cannot be found in the pre-trained word2Vec
Array.fill[Float](embeddingDim)(0)
}
}.flatten
val featureTensor: Tensor[Float] = Tensor[Float]()
var featureData: Array[Float] = null
val sampleSize = sampleShape.product
val localModel = broadcastModel.value
// create tensor from input column
if (featureData == null) {
featureData = new Array[Float](1 * sampleSize)
}
Array.copy(data.map(ev.fromType(_)), 0,
featureData, 0, sampleSize)
featureTensor.set(Storage[Float](featureData), sizes = Array(1) ++ sampleShape)
val tensorBuffer = featureTensor.transpose(2, 3)
// predict
val output = localModel.forward(tensorBuffer).toTensor[Float]
val predict = if (output.dim == 2) {
output.max(2)._2.squeeze().storage().array()
} else if (output.dim == 1) {
output.max(1)._2.squeeze().storage().array()
} else {
throw new IllegalArgumentException
}
ev.toType[Int](predict(0))
}
udf
}
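  // A minimal sketch of wiring the generated function into Spark SQL (the
  // UDF and column names below are illustrative):
  //   val classify = genUdf(sc, model, sampleShape, word2Index, word2Vec)
  //   sqlContext.udf.register("textClassifier", classify)
  //   // SELECT filename, textClassifier(text) FROM docs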
def loadTestData(testDir: String): IndexedSeq[Sample] = {
val fileList = new File(testDir).listFiles()
.filter(_.isFile).filter(_.getName.forall(Character.isDigit)).sorted
val testData = fileList.map { file => {
val fileName = file.getName
val source = Source.fromFile(file, "ISO-8859-1")
val text = try source.getLines().toList.mkString("\n") finally source.close()
Sample(fileName, text)
}
}
testData
}
def getResourcePath(resource: String): String = {
val stream: InputStream = getClass.getResourceAsStream(resource)
val lines = scala.io.Source.fromInputStream(stream).mkString
val file = File.createTempFile(resource, "")
val pw = new PrintWriter(file)
pw.write(lines)
pw.close()
file.getAbsolutePath
}
val localParser = new OptionParser[TextClassificationUDFParams]("BigDL Example") {
opt[String]('b', "baseDir")
.text("Base dir containing the training and word2Vec data")
.action((x, c) => c.copy(baseDir = x))
opt[String]('p', "partitionNum")
.text("you may want to tune the partitionNum if run into spark mode")
.action((x, c) => c.copy(partitionNum = x.toInt))
opt[String]('s', "maxSequenceLength")
.text("maxSequenceLength")
.action((x, c) => c.copy(maxSequenceLength = x.toInt))
opt[String]('w', "maxWordsNum")
.text("maxWordsNum")
.action((x, c) => c.copy(maxWordsNum = x.toInt))
opt[String]('l', "trainingSplit")
.text("trainingSplit")
.action((x, c) => c.copy(trainingSplit = x.toDouble))
opt[String]('z', "batchSize")
.text("batchSize")
.action((x, c) => c.copy(batchSize = x.toInt))
opt[String]("modelPath")
.text("where to load the model")
.action((x, c) => c.copy(modelPath = Some(x)))
opt[String]("checkpoint")
.text("where to load the model")
.action((x, c) => c.copy(checkpoint = Some(x)))
opt[String]('f', "dataDir")
.text("Text dir containing the text data")
.action((x, c) => c.copy(testDir = x))
}
val parquetProducerParser
= new OptionParser[TextProducerParquetParams]("BigDL Streaming Example") {
opt[String]('s', "srcFolder")
.required()
.text("Base dir containing the text data")
.action((x, c) => c.copy(srcFolder = x))
opt[String]('d', "destFolder")
.required()
.text("Destination parquet dir containing the text data")
.action((x, c) => c.copy(destFolder = x))
opt[Int]('b', "batchsize")
.text("produce batchsize")
.action((x, c) => c.copy(batchsize = x))
opt[Long]('i', "interval")
.text("produce interval")
.action((x, c) => c.copy(interval = x))
}
}
| psyyz10/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/example/udfpredictor/Utils.scala | Scala | apache-2.0 | 8,085 |
import scala.reflect.runtime.{universe => ru, currentMirror}
import ru._
object SCL5592 {
def testFun(symbol :ClassSymbol) {
/*start*/currentMirror.runtimeClass(symbol)/*end*/
}
}
//runtime.universe.RuntimeClass | triggerNZ/intellij-scala | testdata/typeInference/scalaReflect/SCL5592.scala | Scala | apache-2.0 | 220 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen.calls
import java.lang.reflect.Method
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.avatica.util.TimeUnitRange
import org.apache.calcite.sql.SqlOperator
import org.apache.calcite.sql.fun.SqlStdOperatorTable._
import org.apache.calcite.sql.fun.SqlTrimFunction
import org.apache.calcite.util.BuiltInMethod
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.api.java.typeutils.GenericTypeInfo
import org.apache.flink.table.functions.sql.ScalarSqlFunctions
import org.apache.flink.table.functions.sql.ScalarSqlFunctions._
import org.apache.flink.table.functions.utils.{ScalarSqlFunction, TableSqlFunction}
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
import scala.collection.mutable
/**
* Global hub for user-defined and built-in advanced SQL functions.
*/
object FunctionGenerator {
private val sqlFunctions: mutable.Map[(SqlOperator, Seq[TypeInformation[_]]), CallGenerator] =
mutable.Map()
// ----------------------------------------------------------------------------------------------
// String functions
// ----------------------------------------------------------------------------------------------
addSqlFunctionMethod(
SUBSTRING,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.SUBSTRING.method)
addSqlFunctionMethod(
SUBSTRING,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.SUBSTRING.method)
addSqlFunction(
TRIM,
Seq(new GenericTypeInfo(classOf[SqlTrimFunction.Flag]), STRING_TYPE_INFO, STRING_TYPE_INFO),
new TrimCallGen())
addSqlFunctionMethod(
CHAR_LENGTH,
Seq(STRING_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethod.CHAR_LENGTH.method)
addSqlFunctionMethod(
CHARACTER_LENGTH,
Seq(STRING_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethod.CHAR_LENGTH.method)
addSqlFunctionMethod(
UPPER,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.UPPER.method)
addSqlFunctionMethod(
LOWER,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.LOWER.method)
addSqlFunctionMethod(
INITCAP,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.INITCAP.method)
addSqlFunctionMethod(
LIKE,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
BOOLEAN_TYPE_INFO,
BuiltInMethod.LIKE.method)
addSqlFunctionMethod(
LIKE,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO),
BOOLEAN_TYPE_INFO,
BuiltInMethods.LIKE_WITH_ESCAPE)
addSqlFunctionNotMethod(
NOT_LIKE,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
BuiltInMethod.LIKE.method)
addSqlFunctionMethod(
SIMILAR_TO,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
BOOLEAN_TYPE_INFO,
BuiltInMethod.SIMILAR.method)
addSqlFunctionMethod(
SIMILAR_TO,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO),
BOOLEAN_TYPE_INFO,
BuiltInMethods.SIMILAR_WITH_ESCAPE)
addSqlFunctionNotMethod(
NOT_SIMILAR_TO,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
BuiltInMethod.SIMILAR.method)
addSqlFunctionMethod(
POSITION,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethod.POSITION.method)
addSqlFunctionMethod(
OVERLAY,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.OVERLAY.method)
addSqlFunctionMethod(
OVERLAY,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, INT_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.OVERLAY.method)
addSqlFunctionMethod(
REGEXP_REPLACE,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.REGEXP_REPLACE)
addSqlFunctionMethod(
REPLACE,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.REPLACE.method)
addSqlFunctionMethod(
REGEXP_EXTRACT,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.REGEXP_EXTRACT)
addSqlFunctionMethod(
REGEXP_EXTRACT,
Seq(STRING_TYPE_INFO, STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.REGEXP_EXTRACT_WITHOUT_INDEX)
addSqlFunctionMethod(
FROM_BASE64,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.FROMBASE64)
addSqlFunctionMethod(
TO_BASE64,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.TOBASE64)
addSqlFunctionMethod(
UUID,
Seq(),
STRING_TYPE_INFO,
BuiltInMethods.UUID)
addSqlFunctionMethod(
LTRIM,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.LTRIM.method)
addSqlFunctionMethod(
RTRIM,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethod.RTRIM.method)
addSqlFunctionMethod(
REPEAT,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.REPEAT)
// ----------------------------------------------------------------------------------------------
// Arithmetic functions
// ----------------------------------------------------------------------------------------------
addSqlFunctionMethod(
LOG10,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.LOG10)
addSqlFunctionMethod(
LOG2,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.LOG2)
addSqlFunctionMethod(
COSH,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COSH)
addSqlFunctionMethod(
COSH,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COSH_DEC)
addSqlFunctionMethod(
LN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.LN)
addSqlFunctionMethod(
SINH,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.SINH)
addSqlFunctionMethod(
SINH,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.SINH_DEC)
addSqlFunctionMethod(
EXP,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.EXP)
addSqlFunctionMethod(
POWER,
Seq(DOUBLE_TYPE_INFO, DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.POWER)
addSqlFunctionMethod(
POWER,
Seq(DOUBLE_TYPE_INFO, BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.POWER_DEC)
addSqlFunctionMethod(
POWER,
Seq(BIG_DEC_TYPE_INFO, BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.POWER_DEC_DEC)
addSqlFunction(
ABS,
Seq(DOUBLE_TYPE_INFO),
new MultiTypeMethodCallGen(BuiltInMethods.ABS))
addSqlFunction(
ABS,
Seq(BIG_DEC_TYPE_INFO),
new MultiTypeMethodCallGen(BuiltInMethods.ABS_DEC))
addSqlFunction(
FLOOR,
Seq(DOUBLE_TYPE_INFO),
new FloorCeilCallGen(BuiltInMethod.FLOOR.method))
addSqlFunction(
FLOOR,
Seq(BIG_DEC_TYPE_INFO),
new FloorCeilCallGen(BuiltInMethod.FLOOR.method))
addSqlFunction(
CEIL,
Seq(DOUBLE_TYPE_INFO),
new FloorCeilCallGen(BuiltInMethod.CEIL.method))
addSqlFunction(
CEIL,
Seq(BIG_DEC_TYPE_INFO),
new FloorCeilCallGen(BuiltInMethod.CEIL.method))
addSqlFunctionMethod(
SIN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.SIN)
addSqlFunctionMethod(
SIN,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.SIN_DEC)
addSqlFunctionMethod(
COS,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COS)
addSqlFunctionMethod(
COS,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COS_DEC)
addSqlFunctionMethod(
TAN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TAN)
addSqlFunctionMethod(
TAN,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TAN_DEC)
addSqlFunctionMethod(
TANH,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TANH)
addSqlFunctionMethod(
TANH,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TANH_DEC)
addSqlFunctionMethod(
COT,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COT)
addSqlFunctionMethod(
COT,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.COT_DEC)
addSqlFunctionMethod(
ASIN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ASIN)
addSqlFunctionMethod(
ASIN,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ASIN_DEC)
addSqlFunctionMethod(
ACOS,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ACOS)
addSqlFunctionMethod(
ACOS,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ACOS_DEC)
addSqlFunctionMethod(
ATAN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ATAN)
addSqlFunctionMethod(
ATAN,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ATAN_DEC)
addSqlFunctionMethod(
ATAN2,
Seq(DOUBLE_TYPE_INFO, DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ATAN2_DOUBLE_DOUBLE)
addSqlFunctionMethod(
ATAN2,
Seq(BIG_DEC_TYPE_INFO, BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ATAN2_DEC_DEC)
addSqlFunctionMethod(
DEGREES,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.DEGREES)
addSqlFunctionMethod(
DEGREES,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.DEGREES_DEC)
addSqlFunctionMethod(
RADIANS,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.RADIANS)
addSqlFunctionMethod(
RADIANS,
Seq(BIG_DEC_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.RADIANS_DEC)
addSqlFunctionMethod(
SIGN,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.SIGN_DOUBLE)
addSqlFunctionMethod(
SIGN,
Seq(INT_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethods.SIGN_INT)
addSqlFunctionMethod(
SIGN,
Seq(LONG_TYPE_INFO),
LONG_TYPE_INFO,
BuiltInMethods.SIGN_LONG)
addSqlFunctionMethod(
SIGN,
Seq(BIG_DEC_TYPE_INFO),
BIG_DEC_TYPE_INFO,
BuiltInMethods.SIGN_DEC)
addSqlFunctionMethod(
ROUND,
Seq(LONG_TYPE_INFO, INT_TYPE_INFO),
LONG_TYPE_INFO,
BuiltInMethods.ROUND_LONG)
addSqlFunctionMethod(
ROUND,
Seq(INT_TYPE_INFO, INT_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethods.ROUND_INT)
addSqlFunctionMethod(
ROUND,
Seq(BIG_DEC_TYPE_INFO, INT_TYPE_INFO),
BIG_DEC_TYPE_INFO,
BuiltInMethods.ROUND_DEC)
addSqlFunctionMethod(
ROUND,
Seq(DOUBLE_TYPE_INFO, INT_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.ROUND_DOUBLE)
addSqlFunction(
PI,
Seq(),
new ConstantCallGen(DOUBLE_TYPE_INFO, Math.PI.toString))
addSqlFunction(
E,
Seq(),
new ConstantCallGen(DOUBLE_TYPE_INFO, Math.E.toString))
addSqlFunction(
RAND,
Seq(),
new RandCallGen(isRandInteger = false, hasSeed = false))
addSqlFunction(
RAND,
Seq(INT_TYPE_INFO),
new RandCallGen(isRandInteger = false, hasSeed = true))
addSqlFunction(
RAND_INTEGER,
Seq(INT_TYPE_INFO),
new RandCallGen(isRandInteger = true, hasSeed = false))
addSqlFunction(
RAND_INTEGER,
Seq(INT_TYPE_INFO, INT_TYPE_INFO),
new RandCallGen(isRandInteger = true, hasSeed = true))
addSqlFunctionMethod(
ScalarSqlFunctions.LOG,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.LOG)
addSqlFunctionMethod(
ScalarSqlFunctions.LOG,
Seq(DOUBLE_TYPE_INFO, DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.LOG_WITH_BASE)
addSqlFunction(
ScalarSqlFunctions.E,
Seq(),
new ConstantCallGen(DOUBLE_TYPE_INFO, Math.E.toString))
addSqlFunctionMethod(
ScalarSqlFunctions.BIN,
Seq(LONG_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.BIN)
addSqlFunctionMethod(
ScalarSqlFunctions.HEX,
Seq(LONG_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.HEX_LONG)
addSqlFunctionMethod(
ScalarSqlFunctions.HEX,
Seq(STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.HEX_STRING)
addSqlFunctionMethod(
TRUNCATE,
Seq(LONG_TYPE_INFO),
LONG_TYPE_INFO,
BuiltInMethods.TRUNCATE_LONG_ONE)
addSqlFunctionMethod(
TRUNCATE,
Seq(INT_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethods.TRUNCATE_INT_ONE)
addSqlFunctionMethod(
TRUNCATE,
Seq(BIG_DEC_TYPE_INFO),
BIG_DEC_TYPE_INFO,
BuiltInMethods.TRUNCATE_DEC_ONE)
addSqlFunctionMethod(
TRUNCATE,
Seq(DOUBLE_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TRUNCATE_DOUBLE_ONE)
addSqlFunctionMethod(
TRUNCATE,
Seq(LONG_TYPE_INFO, INT_TYPE_INFO),
LONG_TYPE_INFO,
BuiltInMethods.TRUNCATE_LONG)
addSqlFunctionMethod(
TRUNCATE,
Seq(INT_TYPE_INFO, INT_TYPE_INFO),
INT_TYPE_INFO,
BuiltInMethods.TRUNCATE_INT)
addSqlFunctionMethod(
TRUNCATE,
Seq(BIG_DEC_TYPE_INFO, INT_TYPE_INFO),
BIG_DEC_TYPE_INFO,
BuiltInMethods.TRUNCATE_DEC)
addSqlFunctionMethod(
TRUNCATE,
Seq(DOUBLE_TYPE_INFO, INT_TYPE_INFO),
DOUBLE_TYPE_INFO,
BuiltInMethods.TRUNCATE_DOUBLE)
// ----------------------------------------------------------------------------------------------
// Temporal functions
// ----------------------------------------------------------------------------------------------
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), LONG_TYPE_INFO),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), TimeIntervalTypeInfo.INTERVAL_MILLIS),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), SqlTimeTypeInfo.TIMESTAMP),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), SqlTimeTypeInfo.TIME),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), TimeIntervalTypeInfo.INTERVAL_MONTHS),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
EXTRACT,
Seq(new GenericTypeInfo(classOf[TimeUnitRange]), SqlTimeTypeInfo.DATE),
new ExtractCallGen(LONG_TYPE_INFO, BuiltInMethod.UNIX_DATE_EXTRACT.method))
addSqlFunction(
TIMESTAMP_DIFF,
Seq(
new GenericTypeInfo(classOf[TimeUnit]),
SqlTimeTypeInfo.TIMESTAMP,
SqlTimeTypeInfo.TIMESTAMP),
new TimestampDiffCallGen)
addSqlFunction(
TIMESTAMP_DIFF,
Seq(new GenericTypeInfo(classOf[TimeUnit]), SqlTimeTypeInfo.TIMESTAMP, SqlTimeTypeInfo.DATE),
new TimestampDiffCallGen)
addSqlFunction(
TIMESTAMP_DIFF,
Seq(new GenericTypeInfo(classOf[TimeUnit]), SqlTimeTypeInfo.DATE, SqlTimeTypeInfo.TIMESTAMP),
new TimestampDiffCallGen)
addSqlFunction(
TIMESTAMP_DIFF,
Seq(new GenericTypeInfo(classOf[TimeUnit]), SqlTimeTypeInfo.DATE, SqlTimeTypeInfo.DATE),
new TimestampDiffCallGen)
addSqlFunction(
FLOOR,
Seq(SqlTimeTypeInfo.DATE, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.FLOOR.method,
Some(BuiltInMethod.UNIX_DATE_FLOOR.method)))
addSqlFunction(
FLOOR,
Seq(SqlTimeTypeInfo.TIME, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.FLOOR.method,
Some(BuiltInMethod.UNIX_DATE_FLOOR.method)))
addSqlFunction(
FLOOR,
Seq(SqlTimeTypeInfo.TIMESTAMP, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.FLOOR.method,
Some(BuiltInMethod.UNIX_TIMESTAMP_FLOOR.method)))
  // TODO: once CALCITE-3199 is fixed, use BuiltInMethod.UNIX_DATE_CEIL
  // https://issues.apache.org/jira/browse/CALCITE-3199
addSqlFunction(
CEIL,
Seq(SqlTimeTypeInfo.DATE, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.CEIL.method,
Some(BuiltInMethods.UNIX_DATE_CEIL)))
addSqlFunction(
CEIL,
Seq(SqlTimeTypeInfo.TIME, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.CEIL.method,
Some(BuiltInMethod.UNIX_DATE_CEIL.method)))
addSqlFunction(
CEIL,
Seq(SqlTimeTypeInfo.TIMESTAMP, new GenericTypeInfo(classOf[TimeUnitRange])),
new FloorCeilCallGen(
BuiltInMethod.CEIL.method,
Some(BuiltInMethod.UNIX_TIMESTAMP_CEIL.method)))
addSqlFunction(
CURRENT_DATE,
Seq(),
new CurrentTimePointCallGen(SqlTimeTypeInfo.DATE, local = false))
addSqlFunction(
CURRENT_TIME,
Seq(),
new CurrentTimePointCallGen(SqlTimeTypeInfo.TIME, local = false))
addSqlFunction(
CURRENT_TIMESTAMP,
Seq(),
new CurrentTimePointCallGen(SqlTimeTypeInfo.TIMESTAMP, local = false))
addSqlFunction(
LOCALTIME,
Seq(),
new CurrentTimePointCallGen(SqlTimeTypeInfo.TIME, local = true))
addSqlFunction(
LOCALTIMESTAMP,
Seq(),
new CurrentTimePointCallGen(SqlTimeTypeInfo.TIMESTAMP, local = true))
addSqlFunction(
ScalarSqlFunctions.DATE_FORMAT,
Seq(SqlTimeTypeInfo.TIMESTAMP, STRING_TYPE_INFO),
new DateFormatCallGen
)
addSqlFunctionMethod(
ScalarSqlFunctions.LPAD,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO, STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.LPAD)
addSqlFunctionMethod(
ScalarSqlFunctions.RPAD,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO, STRING_TYPE_INFO),
STRING_TYPE_INFO,
BuiltInMethods.RPAD)
// ----------------------------------------------------------------------------------------------
// Cryptographic Hash functions
// ----------------------------------------------------------------------------------------------
addSqlFunction(
ScalarSqlFunctions.MD5,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("MD5")
)
addSqlFunction(
ScalarSqlFunctions.SHA1,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("SHA-1")
)
addSqlFunction(
ScalarSqlFunctions.SHA224,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("SHA-224")
)
addSqlFunction(
ScalarSqlFunctions.SHA256,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("SHA-256")
)
addSqlFunction(
ScalarSqlFunctions.SHA384,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("SHA-384")
)
addSqlFunction(
ScalarSqlFunctions.SHA512,
Seq(STRING_TYPE_INFO),
new HashCalcCallGen("SHA-512")
)
addSqlFunction(
ScalarSqlFunctions.SHA2,
Seq(STRING_TYPE_INFO, INT_TYPE_INFO),
new HashCalcCallGen("SHA-2")
)
// ----------------------------------------------------------------------------------------------
/**
* Returns a [[CallGenerator]] that generates all required code for calling the given
* [[SqlOperator]].
*
* @param sqlOperator SQL operator (might be overloaded)
* @param operandTypes actual operand types
* @param resultType expected return type
* @return [[CallGenerator]]
*/
def getCallGenerator(
sqlOperator: SqlOperator,
operandTypes: Seq[TypeInformation[_]],
resultType: TypeInformation[_])
: Option[CallGenerator] = sqlOperator match {
// user-defined scalar function
case ssf: ScalarSqlFunction =>
Some(
new ScalarFunctionCallGen(
ssf.getScalarFunction,
operandTypes,
resultType
)
)
// user-defined table function
case tsf: TableSqlFunction =>
Some(
new TableFunctionCallGen(
tsf.getTableFunction,
operandTypes,
resultType
)
)
// built-in scalar function
case _ =>
sqlFunctions.get((sqlOperator, operandTypes))
.orElse(sqlFunctions.find(entry => entry._1._1 == sqlOperator
&& entry._1._2.length == operandTypes.length
&& entry._1._2.zip(operandTypes).forall {
case (x: BasicTypeInfo[_], y: BasicTypeInfo[_]) => y.shouldAutocastTo(x) || x == y
case _ => false
}).map(_._2))
}
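  // For example, a code generator asking for
  //   getCallGenerator(SUBSTRING, Seq(STRING_TYPE_INFO, INT_TYPE_INFO), STRING_TYPE_INFO)
  // receives the MethodCallGen registered for BuiltInMethod.SUBSTRING above,
  // while an unknown operator/operand combination yields None.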
// ----------------------------------------------------------------------------------------------
private def addSqlFunctionMethod(
sqlOperator: SqlOperator,
operandTypes: Seq[TypeInformation[_]],
returnType: TypeInformation[_],
method: Method)
: Unit = {
sqlFunctions((sqlOperator, operandTypes)) = new MethodCallGen(returnType, method)
}
private def addSqlFunctionNotMethod(
sqlOperator: SqlOperator,
operandTypes: Seq[TypeInformation[_]],
method: Method)
: Unit = {
sqlFunctions((sqlOperator, operandTypes)) =
new NotCallGenerator(new MethodCallGen(BOOLEAN_TYPE_INFO, method))
}
private def addSqlFunction(
sqlOperator: SqlOperator,
operandTypes: Seq[TypeInformation[_]],
callGenerator: CallGenerator)
: Unit = {
sqlFunctions((sqlOperator, operandTypes)) = callGenerator
}
}
| GJL/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/calls/FunctionGenerator.scala | Scala | apache-2.0 | 21,942 |
/**
* File: OrdiniTest.scala
* Package: tests
 * Author: Francesco Burato
 * Created: 01/Jul/2013
*/
package tests
import org.scalatest.{FunSuite, BeforeAndAfter}
import constraintobjs._
import scala.collection.mutable.HashMap
/**
 * Unit tests for Ordini and Comparator
* @author Francesco Burato
*
*/
class OrdiniTest extends FunSuite with BeforeAndAfter {
before{
Domain.addDomain(new Domain("x",Set("a","b","c")))
Domain.addDomain(new Domain("y",Set("a","b")))
}
test("Independent variable should work"){
val ord = new Ordini("x")
ord.add(Map("a"->0,"b"->1,"c"->2))
ord.getConstraints match {
case None => assert(false)
case Some(x) => assert(x.accepted.size===1)
}
}
test("Dependent variable with uncomplete order should not work") {
val ord = new Ordini("y",Vector("x"))
ord.add(Array("b"),Map("a"->0,"b"->1))
ord.getConstraints match {
case None => assert(true)
case _ => assert(false)
}
}
test("Dependent variable with complete order should work") {
val ord = new Ordini("y",Vector("x"))
assert(ord.add(Array("a"),Map("a"->0,"b"->1)))
assert(ord.add(Array("b"),Map("a"->1,"b"->0)))
assert(ord.add(Array("c"),Map("a"->2,"b"->1)))
ord.getConstraints match {
case None => assert(false)
case Some(x) =>
assert(x.accepted.size === 3)
}
}
test("Comparator with independent should work") {
val ord = new Ordini("y")
ord.add(Map("a"->0,"b"->1))
val comp = ord.comparator
    // must return true
assert(comp.put("x","a"))
comp.isMinor("a", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("b", "a") match {
case None => assert(false)
case Some(x) => assert(!x)
}
comp.isMinor("b", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("a", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
}
test("Comparator with dependent should work") {
val ord = new Ordini("y",Vector("x"))
assert(ord.add(Array("a"),Map("a"->0,"b"->1)))
assert(ord.add(Array("b"),Map("a"->1,"b"->0)))
assert(ord.add(Array("c"),Map("a"->2,"b"->1)))
val comp = ord.comparator
    // must return true
assert(! comp.put("y","a"))
assert(comp.put("x","a"))
comp.isMinor("a", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("b", "a") match {
case None => assert(false)
case Some(x) => assert(!x)
}
comp.isMinor("b", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("a", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.reset
assert(comp.put("x","b"))
comp.isMinor("a", "b") match {
case None => assert(false)
case Some(x) => assert(!x)
}
comp.isMinor("b", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("b", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("a", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.reset
assert(comp.put("x","c"))
comp.isMinor("a", "b") match {
case None => assert(false)
case Some(x) => assert(!x)
}
comp.isMinor("b", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("b", "b") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.isMinor("a", "a") match {
case None => assert(false)
case Some(x) => assert(x)
}
comp.reset
}
} | fburato/CPNetSolver | CPNetSolver/src/tests/OrdiniTest.scala | Scala | gpl-3.0 | 3,769 |
package com.sksamuel.elastic4s.testkit
import java.io.PrintWriter
import java.nio.file.{Path, Paths}
import java.util.UUID
import com.sksamuel.elastic4s.{ElasticDsl, ElasticClient}
import com.sksamuel.elastic4s.ElasticDsl._
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse
import org.elasticsearch.common.settings.Settings
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.Future
/** @author Stephen Samuel */
trait NodeBuilder {
private val logger = LoggerFactory.getLogger(getClass)
/**
* Override this if you wish to change where the home directory for the local instance will be located.
*/
lazy val testNodeHomePath: Path = tempDirectoryPath resolve UUID.randomUUID().toString
def numberOfReplicas: Int = 0
def numberOfShards: Int = 1
def indexRefresh: FiniteDuration = 1.seconds
def httpEnabled: Boolean = true
lazy val tempDirectoryPath: Path = Paths get System.getProperty("java.io.tmpdir")
lazy val testNodeConfPath: Path = testNodeHomePath resolve "config"
/**
* Override this if you wish to control all the settings used by the client.
*/
protected def settings: Settings.Builder = {
val home = testNodeHomePath
logger.info(s"Elasticsearch test-server located at $home")
home.toFile.mkdirs()
home.toFile.deleteOnExit()
val conf = testNodeConfPath
conf.toFile.mkdirs()
conf.toFile.deleteOnExit()
// todo this needs to come out of here and into the analyzer test alone when we can isolate nodes
val newStopListFile = (testNodeConfPath resolve "stoplist.txt").toFile
val writer = new PrintWriter(newStopListFile)
writer.write("a\\nan\\nthe\\nis\\nand\\nwhich") // writing the stop words to the file
writer.close()
val builder = Settings.settingsBuilder()
.put("node.http.enabled", httpEnabled)
.put("http.enabled", httpEnabled)
.put("path.home", home.toFile.getAbsolutePath)
.put("path.repo", home.toFile.getAbsolutePath)
.put("path.conf", conf.toFile.getAbsolutePath)
.put("index.number_of_shards", numberOfShards)
.put("index.number_of_replicas", numberOfReplicas)
.put("script.inline", "on")
.put("script.indexed", "on")
.put("index.refresh_interval", indexRefresh.toSeconds + "s")
.put("discovery.zen.ping.multicast.enabled", "false")
.put("es.logger.level", "INFO")
.put("cluster.name", getClass.getSimpleName)
configureSettings(builder)
}
/**
* Invoked by the sugar trait to setup the settings builder that was created by settings()
*/
def configureSettings(builder: Settings.Builder): Settings.Builder = builder
/**
* Invoked to create a local client for the elastic node.
* Override to create the client youself.
*/
def createLocalClient: ElasticClient = ElasticClient.local(settings.build)
}
trait ElasticSugar extends NodeBuilder {
private val logger = LoggerFactory.getLogger(getClass)
private lazy val internalClient = createLocalClient
/**
* Is invoked when a test needs access to a client for the test node.
   * You can override this if you wish to control precisely how the client is created.
*/
implicit def client: ElasticClient = internalClient
def refresh(indexes: String*): Future[RefreshResponse] = {
val i = indexes.size match {
case 0 => Seq("_all")
case _ => indexes
}
client.execute {
ElasticDsl.refresh index indexes
}
}
def blockUntilGreen(): Unit = {
blockUntil("Expected cluster to have green status") { () =>
client.execute {
get cluster health
}.await.getStatus == ClusterHealthStatus.GREEN
}
}
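  // blockUntil polls the predicate with a linearly growing sleep
  // (200 ms * attempt, up to 16 attempts), so it gives up after roughly
  // 27 seconds and then fails the requirement with the given explanation.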
def blockUntil(explain: String)(predicate: () => Boolean): Unit = {
var backoff = 0
var done = false
while (backoff <= 16 && !done) {
if (backoff > 0) Thread.sleep(200 * backoff)
backoff = backoff + 1
try {
done = predicate()
} catch {
case e: Throwable => logger.warn("problem while testing predicate", e)
}
}
require(done, s"Failed waiting on: $explain")
}
def ensureIndexExists(index: String): Unit = {
val resp = client.execute {
indexExists(index)
}.await
if (!resp.isExists)
client.execute {
create index index
}.await
}
def blockUntilDocumentExists(id: String, index: String, `type`: String): Unit = {
blockUntil(s"Expected to find document $id") {
() =>
client.execute {
get id id from index / `type`
}.await.isExists
}
}
/**
* Will block until the given index and optional types have at least the given number of documents.
*/
def blockUntilCount(expected: Long, index: String, types: String*): Unit = {
blockUntil(s"Expected count of $expected") {
() =>
expected <= client.execute {
count from index types types
}.await.getCount
}
}
def blockUntilExactCount(expected: Long, index: String, types: String*): Unit = {
blockUntil(s"Expected count of $expected") {
() =>
expected == client.execute {
count from index types types
}.await.getCount
}
}
def blockUntilEmpty(index: String): Unit = {
blockUntil(s"Expected empty index $index") {
() =>
client.execute {
count from index
}.await.getCount == 0
}
}
def blockUntilIndexExists(index: String): Unit = {
blockUntil(s"Expected exists index $index") {
() ⇒
client.execute {
indexExists(index)
}.await.isExists
}
}
def blockUntilIndexNotExists(index: String): Unit = {
blockUntil(s"Expected not exists index $index") {
() ⇒
!client.execute {
indexExists(index)
}.await.isExists
}
}
def blockUntilDocumentHasVersion(index: String, `type`: String, id: String, version: Long): Unit = {
blockUntil(s"Expected document $id to have version $version") {
() =>
client.execute {
get id id from index -> `type`
}.await.getVersion == version
}
}
}
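// A minimal usage sketch (index/type names and the document are illustrative):
//   class PeopleIndexTest extends org.scalatest.WordSpec with ElasticSugar {
//     ensureIndexExists("people")
//     client.execute { index into "people" / "doc" fields ("name" -> "sam") }.await
//     refresh("people")
//     blockUntilCount(1, "people")
//   }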
| nicoo/elastic4s | elastic4s-testkit/src/main/scala/com/sksamuel/elastic4s/testkit/ElasticSugar.scala | Scala | apache-2.0 | 6,196 |
package maker.utils
import org.scalatest.{Matchers, FreeSpec}
import maker.utils.FileUtils._
import java.util.jar.JarFile
import scala.collection.JavaConversions._
class BuildJarTests extends FreeSpec with Matchers{
"builds empty jar when given no directories " in {
FileUtils.withTempDir{
dir =>
val jarFile = file(dir, "a.jar")
BuildJar.build(jarFile, Nil)
new JarFile(jarFile).entries.toList should be ('empty)
}
}
"builds empty jar from empty directory" in {
FileUtils.withTempDir{
dir =>
val jarFile = file(dir, "a.jar")
val subDir = file(dir, "subDir").makeDirs()
BuildJar.build(jarFile, subDir :: Nil)
new JarFile(jarFile).entries.toList should be ('empty)
}
}
"Builds jar with files in it" in {
FileUtils.withTempDir{
dir =>
val jarFile = file(dir, "a.jar")
val subDir = file(dir, "subDir").makeDirs()
writeToFile(file(subDir, "a_file"), "some text")
writeToFile(file(subDir, "sub_sub", "another_file"), "some more text")
BuildJar.build(jarFile, subDir :: Nil)
val entryNames = new JarFile(jarFile).entries.toList.map(_.getName)
entryNames should contain ("a_file")
entryNames should contain ("sub_sub/another_file")
}
}
}
| cage433/maker | maker/tests/maker/utils/BuildJarTests.scala | Scala | bsd-2-clause | 1,330 |
object Solution {
def isSquare(n: Long): (Boolean, Long) = {
// fail fast
if (n < 0 || (n & 2) != 0 || (n & 7) == 5 || (n & 11) == 8) return (false, -1)
if (n == 0) return (true, 0)
val x = math.sqrt(n).toLong
(x * x == n, x)
}
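  // Background for the checks in main (editorial note, not part of the original solution):
  // a number n is triangular iff 8n + 1 is a perfect square, and hexagonal iff additionally
  // the root k of 8n + 1 satisfies (k + 1) % 4 == 0, in which case n = H((k + 1) / 4).
  // Worked check: 40755 = T(285) = P(165) = H(143); 8 * 40755 + 1 = 326041 = 571 * 571 and
  // (571 + 1) % 4 == 0, so both branches below report 40755.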
def main(args: Array[String]) {
val Array(n, a, b) = readLine.split(" ").map(_.toLong)
(a, b) match {
      case (3, 5) => // numbers below n that are both triangular and pentagonal
var m, p = 1L
while (p < n) {
if (isSquare(8 * p + 1)._1) println(p)
m += 1
p = m * (3 * m - 1) / 2
}
      case (5, 6) => // numbers below n that are both pentagonal and hexagonal
var m, p = 1L
while (p < n) {
val (isSq, root) = isSquare(8 * p + 1)
if (isSq && ((root + 1) & 3) == 0) println(p)
m += 1
p = m * (3 * m - 1) / 2
}
case _ =>
}
}
}
| advancedxy/hackerrank | project-euler/problem-45/TrianPenAndHex.scala | Scala | mit | 804 |
import org.joda.time.DateTime
import play.api.libs.json.{Json, Writes, Reads, Format}
/**
* Created by alvarovilaplana on 21/12/2015.
*/
package object controllers {
val pattern = "dd/MM/yyyy hh:mm:ss"
implicit val dateFormat = Format[DateTime](
Reads.jodaDateReads(pattern),
Writes.jodaDateWrites(pattern))
implicit val authInFmt = Json.format[AuthorizationIn]
implicit val authOutFmt = Json.format[AuthorizationOut]
}
| tvlive/tv-auth | app/controllers/package.scala | Scala | apache-2.0 | 442 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.mongodb
import slamdata.Predef._
/** A subset of the information available from the collStats command. Many more
* fields are available, some only in particular MongoDB versions or with
* particular storage engines.
* @param count The number of documents in the collection.
* @param dataSize The total size "in memory" in bytes of all documents in the
* collection, not including headers or indexes.
 * @param sharded Whether the collection is sharded.
 */
final case class CollectionStatistics(
count: Long,
dataSize: Long,
sharded: Boolean)
| drostron/quasar | mongodb/src/main/scala/quasar/physical/mongodb/CollectionStatistics.scala | Scala | apache-2.0 | 1,140 |
// Solution-2.scala
// Solution to Exercise 2 in "Values"
val v1 = 17
println(v1)
// This won't work:
v1 = 20
println("You can't change a val")
/* OUTPUT_SHOULD_CONTAIN
error: reassignment to val
v1 = 20
^
one error found
*/ | P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/01_Values/Solution-2.scala | Scala | apache-2.0 | 229 |
package model
import model.impl.{Tile, TileNameEnum}
import org.scalatest._
import util.position.Position
class TileSpec extends FlatSpec with Matchers {
"A Tile" should "have a name and a position" in {
val tileName = TileNameEnum.CAT
val tilePos = new Position(1, 2)
val tile: Tile = new Tile(tileName, tilePos)
tile.name should be(tileName)
tile.pos should be(tilePos)
}
"toString" should "have given output" in {
val tileName = TileNameEnum.CAT
val tilePos = new Position(1, 2)
val tile: Tile = new Tile(tileName, tilePos)
val tileString = "C:{1,2}"
tile.toString should be(tileString)
}
"equal" should "objects, if name and position is equal" in {
val tile1: Tile = new Tile(TileNameEnum.RABBIT, new Position(1, 1))
val tile2: Tile = new Tile(TileNameEnum.RABBIT, new Position(1, 1))
tile1 should be(tile2)
}
it should "not equal, if name or position is different" in {
val tile1: Tile = new Tile(TileNameEnum.RABBIT, new Position(1, 1))
val tile2: Tile = new Tile(TileNameEnum.RABBIT, new Position(1, 2))
tile1 should not be tile2
}
}
| MartinLei/Arimaa | src/test/scala/model/TileSpec.scala | Scala | mit | 1,132 |
package org.bitcoins.marshallers.rpc.bitcoincore.blockchain
import org.bitcoins.marshallers.MarshallerUtil
import org.bitcoins.marshallers.rpc.bitcoincore.blockchain.softforks.SoftForkMarshaller
import org.bitcoins.protocol.rpc.bitcoincore.blockchain.softforks.SoftForks
import org.bitcoins.protocol.rpc.bitcoincore.blockchain.{BlockChainInfoImpl, BlockchainInfo}
import spray.json._
import SoftForkMarshaller._
/**
* Created by Tom on 1/11/2016.
*/
object BlockchainInfoMarshaller extends DefaultJsonProtocol with MarshallerUtil{
val chainKey = "chain"
val blockCountKey = "blocks"
val headerCountKey = "headers"
val bestBlockHashKey = "bestblockhash"
val difficultyKey = "difficulty"
val verificationProgressKey = "verificationprogress"
val chainWorkKey = "chainwork"
val prunedKey = "pruned"
val softForksKey = "softforks"
implicit object BlockchainInfoFormatter extends RootJsonFormat[BlockchainInfo] {
override def read (value : JsValue) : BlockchainInfo = {
val obj = value.asJsObject
val chain = obj.fields(chainKey).convertTo[String]
val blockCount = obj.fields(blockCountKey).convertTo[Int]
val headerCount = obj.fields(headerCountKey).convertTo[Int]
val bestBlockHash = obj.fields(bestBlockHashKey).convertTo[String]
val difficulty = obj.fields(difficultyKey).convertTo[Double]
val verificationProgress = obj.fields(verificationProgressKey).convertTo[Double]
val chainWork = obj.fields(chainWorkKey).convertTo[String]
val pruned = obj.fields(prunedKey).convertTo[Boolean]
val softForks : Seq[SoftForks] = convertToSoftForksList(obj.fields(softForksKey))
BlockChainInfoImpl(chain, blockCount, headerCount, bestBlockHash, difficulty, verificationProgress, chainWork, pruned, softForks)
}
override def write (detail : BlockchainInfo) : JsValue = {
val softForks : JsArray = convertToJsArray(detail.softForks)
val m : Map[String, JsValue] = Map (
chainKey -> JsString(detail.chain),
blockCountKey -> JsNumber(detail.blockCount),
headerCountKey -> JsNumber(detail.headerCount),
bestBlockHashKey -> JsString(detail.bestBlockHash),
difficultyKey -> JsNumber(detail.difficulty),
verificationProgressKey -> JsNumber(detail.verificationProgress),
chainWorkKey -> JsString(detail.chainWork),
prunedKey -> JsBoolean(detail.pruned),
softForksKey -> softForks
)
JsObject(m)
}
}
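  /* Usage sketch (editorial illustration, not part of the original source): with the
   * formatter above in implicit scope, standard spray-json conversions apply both ways.
   * The JSON literal is abbreviated and purely illustrative.
   *
   *   import spray.json._
   *   import BlockchainInfoMarshaller._
   *   val info: BlockchainInfo = """{"chain":"main", ...}""".parseJson.convertTo[BlockchainInfo]
   *   val js: JsValue = BlockchainInfoFormatter.write(info)
   */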
}
| Christewart/scalacoin | src/main/scala/org/bitcoins/marshallers/rpc/bitcoincore/blockchain/BlockchainInfoMarshaller.scala | Scala | mit | 2485 |
package lila.study
import chess.opening._
import chess.variant.Variant
import lila.tree
object TreeBuilder {
private val initialStandardDests = chess.Game(chess.variant.Standard).situation.destinations
def apply(root: Node.Root, variant: Variant): tree.Root = {
val dests =
if (variant.standard && root.fen.initial) initialStandardDests
else {
val sit = chess.Game(variant.some, root.fen.some).situation
sit.playable(false) ?? sit.destinations
}
makeRoot(root, variant).copy(dests = dests.some)
}
def toBranch(node: Node, variant: Variant): tree.Branch =
tree.Branch(
id = node.id,
ply = node.ply,
move = node.move,
fen = node.fen,
check = node.check,
shapes = node.shapes,
comments = node.comments,
gamebook = node.gamebook,
glyphs = node.glyphs,
clock = node.clock,
crazyData = node.crazyData,
eval = node.score.map(_.eval),
children = toBranches(node.children, variant),
opening = Variant.openingSensibleVariants(variant) ?? FullOpeningDB.findByFen(node.fen),
forceVariation = node.forceVariation
)
def makeRoot(root: Node.Root, variant: Variant): tree.Root =
tree.Root(
ply = root.ply,
fen = root.fen,
check = root.check,
shapes = root.shapes,
comments = root.comments,
gamebook = root.gamebook,
glyphs = root.glyphs,
clock = root.clock,
crazyData = root.crazyData,
eval = root.score.map(_.eval),
children = toBranches(root.children, variant),
opening = Variant.openingSensibleVariants(variant) ?? FullOpeningDB.findByFen(root.fen)
)
private def toBranches(children: Node.Children, variant: Variant): List[tree.Branch] =
children.nodes.view.map(toBranch(_, variant)).toList
}
| luanlv/lila | modules/study/src/main/TreeBuilder.scala | Scala | mit | 1,823 |
package org.hyperreal.yate
import java.awt.Font
import java.awt.Font._
import java.awt.Color
import collection.mutable.{DoubleLinkedList, UnrolledBuffer}
trait DocumentElement
{
val parent: DocumentElement
}
trait DocumentContentElement extends DocumentElement
case class DocumentRootElement( elems: DoubleLinkedList[DocumentContentElement] ) extends DocumentElement
{
val parent = null
}
trait DocumentLineContentElement extends DocumentContentElement
case class DocumentLineElement( elems: DoubleLinkedList[DocumentLineContentElement], parent: DocumentElement ) extends DocumentContentElement
case class DocumentRunElement( chars: UnrolledBuffer[Char], attr: Set[DocumentAttribute], parent: DocumentElement ) extends DocumentLineContentElement
trait DocumentAttribute
case class ForgroundAttribute( c: Color ) extends DocumentAttribute
case class StyleAttribute( style: Int ) extends DocumentAttribute
class Position( elem: DocumentRunElement, idx: Int )
{
}
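/* Composition sketch (editorial illustration, not part of the original source): a minimal
 * document tree with one line holding a single bold run could be assembled roughly as
 * follows; parents are passed as null only to keep the sketch short.
 *
 *   val run  = DocumentRunElement(UnrolledBuffer('h', 'i'), Set(StyleAttribute(BOLD)), parent = null)
 *   val line = DocumentLineElement(DoubleLinkedList[DocumentLineContentElement](run), parent = null)
 *   val root = DocumentRootElement(DoubleLinkedList[DocumentContentElement](line))
 */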
| edadma/yate | src/main/scala/DocumentElement.scala | Scala | gpl-3.0 | 960 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.persistence.jdbc.util
import java.sql.Statement
import akka.persistence.jdbc.util.Schema.{ Oracle, SchemaType }
import slick.jdbc.JdbcBackend.{ Database, Session }
object Schema {
sealed trait SchemaType { def schema: String }
final case class Postgres(schema: String = "schema/postgres/postgres-schema.sql") extends SchemaType
final case class H2(schema: String = "schema/h2/h2-schema.sql") extends SchemaType
final case class MySQL(schema: String = "schema/mysql/mysql-schema.sql") extends SchemaType
final case class Oracle(schema: String = "schema/oracle/oracle-schema.sql") extends SchemaType
final case class SqlServer(schema: String = "schema/sqlserver/sqlserver-schema.sql") extends SchemaType
}
trait DropCreate extends ClasspathResources {
def db: Database
val listOfOracleDropQueries = List(
"""ALTER SESSION SET ddl_lock_timeout = 15""", // (ddl lock timeout in seconds) this allows tests which are still writing to the db to finish gracefully
"""DROP TABLE "journal" CASCADE CONSTRAINT""",
"""DROP TABLE "snapshot" CASCADE CONSTRAINT""",
"""DROP TABLE "deleted_to" CASCADE CONSTRAINT""",
"""DROP TRIGGER "ordering_seq_trigger"""",
"""DROP PROCEDURE "reset_sequence"""",
"""DROP SEQUENCE "ordering_seq"""")
def dropOracle(): Unit = withStatement { stmt =>
listOfOracleDropQueries.foreach { ddl =>
try stmt.executeUpdate(ddl) catch {
case t: java.sql.SQLException if t.getMessage contains "ORA-00942" => // suppress known error message in the test
case t: java.sql.SQLException if t.getMessage contains "ORA-04080" => // suppress known error message in the test
case t: java.sql.SQLException if t.getMessage contains "ORA-02289" => // suppress known error message in the test
case t: java.sql.SQLException if t.getMessage contains "ORA-04043" => // suppress known error message in the test
case t: java.sql.SQLException if t.getMessage contains "ORA-01418" => // suppress known error message in the test
}
}
}
def dropCreate(schemaType: SchemaType): Unit = schemaType match {
case Oracle(schema) =>
dropOracle()
create(schema, "/")
case s: SchemaType => create(s.schema)
}
def create(schema: String, separator: String = ";"): Unit = for {
schema <- Option(fromClasspathAsString(schema))
ddl <- for {
trimmedLine <- schema.split(separator) map (_.trim)
if trimmedLine.nonEmpty
} yield trimmedLine
} withStatement { stmt =>
try stmt.executeUpdate(ddl) catch {
case t: java.sql.SQLSyntaxErrorException if t.getMessage contains "ORA-00942" => // suppress known error message in the test
}
}
def withDatabase[A](f: Database => A): A =
f(db)
def withSession[A](f: Session => A): A = {
withDatabase { db =>
val session = db.createSession()
try f(session) finally session.close()
}
}
def withStatement[A](f: Statement => A): A =
withSession(session => session.withStatement()(f))
}
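/* Usage sketch (editorial illustration, not part of the original source): a journal spec can
 * mix in this trait and recreate the schema before running. The base spec name is
 * hypothetical, and DropCreate still needs a concrete `db: Database`, supplied by that base.
 *
 *   class PostgresJournalSpec extends SomeSpecBase with DropCreate {
 *     dropCreate(Schema.Postgres())
 *   }
 */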
| gavares/akka-persistence-jdbc | src/test/scala/akka/persistence/jdbc/util/DropCreate.scala | Scala | apache-2.0 | 3,621 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.resource
trait ActionOnTaskFailure {
def serialize: String
override def toString = serialize
}
case object ContinueOnTaskFailure extends ActionOnTaskFailure {
val serialize: String = "continue"
}
case object TerminateOnTaskFailure extends ActionOnTaskFailure {
val serialize: String = "terminate"
}
| realstraw/hyperion | core/src/main/scala/com/krux/hyperion/resource/ActionOnTaskFailure.scala | Scala | bsd-3-clause | 565 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.math._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.{RandomDataGenerator, Row}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{GenerateOrdering, LazilyGeneratedOrdering}
import org.apache.spark.sql.types._
class OrderingSuite extends SparkFunSuite with ExpressionEvalHelper {
def compareArrays(a: Seq[Any], b: Seq[Any], expected: Int): Unit = {
test(s"compare two arrays: a = $a, b = $b") {
val dataType = ArrayType(IntegerType)
val rowType = StructType(StructField("array", dataType, nullable = true) :: Nil)
val toCatalyst = CatalystTypeConverters.createToCatalystConverter(rowType)
val rowA = toCatalyst(Row(a)).asInstanceOf[InternalRow]
val rowB = toCatalyst(Row(b)).asInstanceOf[InternalRow]
Seq(Ascending, Descending).foreach { direction =>
val sortOrder = direction match {
case Ascending => BoundReference(0, dataType, nullable = true).asc
case Descending => BoundReference(0, dataType, nullable = true).desc
}
val expectedCompareResult = direction match {
case Ascending => signum(expected)
case Descending => -1 * signum(expected)
}
val kryo = new KryoSerializer(new SparkConf).newInstance()
val intOrdering = new InterpretedOrdering(sortOrder :: Nil)
val genOrdering = new LazilyGeneratedOrdering(sortOrder :: Nil)
val kryoIntOrdering = kryo.deserialize[InterpretedOrdering](kryo.serialize(intOrdering))
val kryoGenOrdering = kryo.deserialize[LazilyGeneratedOrdering](kryo.serialize(genOrdering))
Seq(intOrdering, genOrdering, kryoIntOrdering, kryoGenOrdering).foreach { ordering =>
assert(ordering.compare(rowA, rowA) === 0)
assert(ordering.compare(rowB, rowB) === 0)
assert(signum(ordering.compare(rowA, rowB)) === expectedCompareResult)
assert(signum(ordering.compare(rowB, rowA)) === -1 * expectedCompareResult)
}
}
}
}
// Two arrays have the same size.
compareArrays(Seq[Any](), Seq[Any](), 0)
compareArrays(Seq[Any](1), Seq[Any](1), 0)
compareArrays(Seq[Any](1, 2), Seq[Any](1, 2), 0)
compareArrays(Seq[Any](1, 2, 2), Seq[Any](1, 2, 3), -1)
// Two arrays have different sizes.
compareArrays(Seq[Any](), Seq[Any](1), -1)
compareArrays(Seq[Any](1, 2, 3), Seq[Any](1, 2, 3, 4), -1)
compareArrays(Seq[Any](1, 2, 3), Seq[Any](1, 2, 3, 2), -1)
compareArrays(Seq[Any](1, 2, 3), Seq[Any](1, 2, 2, 2), 1)
// Arrays having nulls.
compareArrays(Seq[Any](1, 2, 3), Seq[Any](1, 2, 3, null), -1)
compareArrays(Seq[Any](), Seq[Any](null), -1)
compareArrays(Seq[Any](null), Seq[Any](null), 0)
compareArrays(Seq[Any](null, null), Seq[Any](null, null), 0)
compareArrays(Seq[Any](null), Seq[Any](null, null), -1)
compareArrays(Seq[Any](null), Seq[Any](1), -1)
compareArrays(Seq[Any](null), Seq[Any](null, 1), -1)
compareArrays(Seq[Any](null, 1), Seq[Any](1, 1), -1)
compareArrays(Seq[Any](1, null, 1), Seq[Any](1, null, 1), 0)
compareArrays(Seq[Any](1, null, 1), Seq[Any](1, null, 2), -1)
// Test GenerateOrdering for all common types. For each type, we construct random input rows that
// contain two columns of that type, then for pairs of randomly-generated rows we check that
// GenerateOrdering agrees with RowOrdering.
{
val structType =
new StructType()
.add("f1", FloatType, nullable = true)
.add("f2", ArrayType(BooleanType, containsNull = true), nullable = true)
val arrayOfStructType = ArrayType(structType)
val complexTypes = ArrayType(IntegerType) :: structType :: arrayOfStructType :: Nil
(DataTypeTestUtils.atomicTypes ++ complexTypes ++ Set(NullType)).foreach { dataType =>
test(s"GenerateOrdering with $dataType") {
val rowOrdering = InterpretedOrdering.forSchema(Seq(dataType, dataType))
val genOrdering = GenerateOrdering.generate(
BoundReference(0, dataType, nullable = true).asc ::
BoundReference(1, dataType, nullable = true).asc :: Nil)
val rowType = StructType(
StructField("a", dataType, nullable = true) ::
StructField("b", dataType, nullable = true) :: Nil)
val maybeDataGenerator = RandomDataGenerator.forType(rowType, nullable = false)
assume(maybeDataGenerator.isDefined)
val randGenerator = maybeDataGenerator.get
val toCatalyst = CatalystTypeConverters.createToCatalystConverter(rowType)
for (_ <- 1 to 50) {
val a = toCatalyst(randGenerator()).asInstanceOf[InternalRow]
val b = toCatalyst(randGenerator()).asInstanceOf[InternalRow]
withClue(s"a = $a, b = $b") {
assert(genOrdering.compare(a, a) === 0)
assert(genOrdering.compare(b, b) === 0)
assert(rowOrdering.compare(a, a) === 0)
assert(rowOrdering.compare(b, b) === 0)
assert(signum(genOrdering.compare(a, b)) === -1 * signum(genOrdering.compare(b, a)))
assert(signum(rowOrdering.compare(a, b)) === -1 * signum(rowOrdering.compare(b, a)))
assert(
signum(rowOrdering.compare(a, b)) === signum(genOrdering.compare(a, b)),
"Generated and non-generated orderings should agree")
}
}
}
}
}
}
| gioenn/xSpark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/OrderingSuite.scala | Scala | apache-2.0 | 6,365 |
import scala.compiletime._
object Test {
def main(args: Array[String]): Unit = {
assert(code"abc ${println(34)} ..." == "abc println(34) ...")
assert(code"abc ${println(34)}" == "abc println(34)")
assert(code"${println(34)} ..." == "println(34) ...")
assert(code"${println(34)}" == "println(34)")
assert(code"..." == "...")
assert(testConstant(code"") == "")
}
inline def testConstant(inline msg: String): String = msg
}
| som-snytt/dotty | tests/run/i6622.scala | Scala | apache-2.0 | 454 |
package com.yetu.notification.client
import com.github.sstone.amqp.Amqp.{QueueParameters, ExchangeParameters}
import com.rabbitmq.client.ConnectionFactory
import com.typesafe.config.ConfigFactory
object Config {
val configObject = ConfigFactory.load()
val RABBITMQ_HOST = configObject.getString("rabbitmq.host")
val RABBITMQ_EXCHANGE_NAME = configObject.getString("rabbitmq.exchange.name")
val RABBITMQ_EXCHANGE_TYPE = configObject.getString("rabbitmq.exchange.type")
val RABBITMQ_EXCHANGE_PASSIVE = configObject.getBoolean("rabbitmq.exchange.passive")
val RABBITMQ_EXCHANGE_DURABLE = configObject.getBoolean("rabbitmq.exchange.durable")
val RABBITMQ_EXCHANGE_AUTODELETE = configObject.getBoolean("rabbitmq.exchange.autodelete")
// queue
val RABBIT_CONSUME_QUEUE_NAME = configObject.getString("rabbitmq.consumeQueue.name")
val RABBIT_CONSUME_QUEUE_PASSIVE = configObject.getBoolean("rabbitmq.consumeQueue.passive")
val RABBIT_CONSUME_QUEUE_DURABLE = configObject.getBoolean("rabbitmq.consumeQueue.durable")
val RABBIT_CONSUME_QUEUE_EXCLUSIVE = configObject.getBoolean("rabbitmq.consumeQueue.exclusive")
val RABBIT_CONSUME_QUEUE_AUTODELETE = configObject.getBoolean("rabbitmq.consumeQueue.autodelete")
val RABBITMQ_VIRTUALHOST = configObject.getString("rabbitmq.virtualhost")
val RABBITMQ_PASSWORD = configObject.getString("rabbitmq.password")
val RABBITMQ_USERNAME = configObject.getString("rabbitmq.username")
val RABBITMQ_USE_SSL = configObject.getBoolean("rabbitmq.useSsl")
val RABBITMQ_HEARTBEAT = configObject.getInt("rabbitmq.heartbeat")
val INBOX_PUBLISH_URL = configObject.getString("yetu.inboxPublishUrl")
val exchangeParams = ExchangeParameters(
name = Config.RABBITMQ_EXCHANGE_NAME,
exchangeType = Config.RABBITMQ_EXCHANGE_TYPE,
passive = Config.RABBITMQ_EXCHANGE_PASSIVE,
durable = Config.RABBITMQ_EXCHANGE_DURABLE,
autodelete = Config.RABBITMQ_EXCHANGE_AUTODELETE
)
val queueParams = QueueParameters(
name = Config.RABBIT_CONSUME_QUEUE_NAME,
passive = Config.RABBIT_CONSUME_QUEUE_PASSIVE,
durable = Config.RABBIT_CONSUME_QUEUE_DURABLE,
exclusive = Config.RABBIT_CONSUME_QUEUE_EXCLUSIVE,
autodelete = Config.RABBIT_CONSUME_QUEUE_AUTODELETE
)
def rabbitMQConnectionSettings(): ConnectionFactory = {
val factory = new ConnectionFactory()
factory.setHost(Config.RABBITMQ_HOST)
factory.setVirtualHost(Config.RABBITMQ_VIRTUALHOST)
factory.setUsername(Config.RABBITMQ_USERNAME)
factory.setPassword(Config.RABBITMQ_PASSWORD)
factory.setRequestedHeartbeat(Config.RABBITMQ_HEARTBEAT)
if (RABBITMQ_USE_SSL) {
factory.useSslProtocol()
}
factory
}
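  /* Usage sketch (editorial illustration, not part of the original source): the factory can
   * open a plain RabbitMQ connection via the standard ConnectionFactory API, e.g.
   *
   *   val connection = Config.rabbitMQConnectionSettings().newConnection()
   *
   * In this codebase the factory is presumably handed to the sstone amqp-client connection
   * actor instead; the direct call above is only for illustration.
   */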
}
| yetu/yetu-notification-client-scala | app/com/yetu/notification/client/Config.scala | Scala | mit | 2691 |
package com.criteo.cuttle.timeseries.contrib
import doobie.implicits._
import com.criteo.cuttle._
import java.time._
class PersistInstant(xa: XA) {
def set(id: String, t: Instant): Instant = {
sql"REPLACE INTO instant_data VALUES (${id}, ${t})".update.run.transact(xa).unsafeRunSync
t
}
def get(id: String): Option[Instant] =
sql"SELECT instant FROM instant_data WHERE id = ${id}"
.query[Instant]
.option
.transact(xa)
.unsafeRunSync
}
object PersistInstant {
private val schemaUpgrades = List(
sql"""
CREATE TABLE instant_data (
id VARCHAR(1000) NOT NULL,
instant DATETIME NOT NULL,
PRIMARY KEY (id)
) ENGINE = INNODB
""".update.run
)
def apply(xa: XA): PersistInstant = {
utils.updateSchema("instant", schemaUpgrades).transact(xa).unsafeRunSync
new PersistInstant(xa)
}
}
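/* Usage sketch (editorial illustration, not part of the original source); the key name is a
 * made-up example id.
 *
 *   val store = PersistInstant(xa)                        // also applies the schema upgrade
 *   store.set("last_successful_run", Instant.now())
 *   val last: Option[Instant] = store.get("last_successful_run")
 */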
| criteo/cuttle | timeseries/src/main/scala/com/criteo/cuttle/timeseries/contrib/PersistStartDate.scala | Scala | apache-2.0 | 890 |
package scalax.collection.io.json
package descriptor
import language.existentials
import reflect.ClassTag
import scalax.collection.GraphEdge._
import scalax.collection.edge._,
scalax.collection.edge.WBase._,
scalax.collection.edge.LBase._,
scalax.collection.edge.WLBase._
import error.JsonGraphError._
/**
* Contains string constants to denote node/edge sections in a JSON text.
*
* An individual instance of this class may be passed to `Descriptor` if
* non-default section id's are to be used.
*/
class SectionId(val nodesId: String,
val edgesId: String)
extends Tuple2[String,String](nodesId, edgesId)
{
/** Returns whether `id` is one of `nodesId` or `edgesId` of this `SectionId`. */
def contains(id: String) = productIterator contains id
/** Returns whether `id` equals to `nodesId` of this `SectionId`. */
def isNodes (id: String) = (productIterator indexOf id) == 0
/** Returns whether `id` equals to `edgesId` of this `SectionId`. */
def isEdges (id: String) = (productIterator indexOf id) == 1
}
/** The default section id's `"nodes"` and `"edges"`. */
object DefaultSectionId extends SectionId("nodes", "edges")
object Defaults {
val defaultId = "(default)"
}
import Defaults._
abstract class TypeId(val typeId: String)
/**
* Top level descriptor to be passed to Graph/JSON conversion methods, in particular to
* `fromJson` and `toJson`.
*
* @param defaultNodeDescriptor the only or default node descriptor accepting/producing a
* flat node list, that is a node list without a node typeId.
* @param defaultEdgeDescriptor the only or default edge descriptor accepting/producing a
* flat edge list, that is an edge list without an edge typeId.
* @param namedNodeDescriptors further optional node descriptors accepting/producing named
* node lists, that is node lists with an explicit node typeId.
* @param namedEdgeDescriptors further optional edge descriptors accepting/producing named
* edge lists, that is edge lists with an explicit edge typeId.
* @param sectionIds denotes node/edge sections in a JSON text defaulting to `"nodes"`
* and `"edges"`.
*/
class Descriptor[N]
(val defaultNodeDescriptor: NodeDescriptor[N],
val defaultEdgeDescriptor: GenEdgeDescriptor[N],
namedNodeDescriptors: Iterable[NodeDescriptor[N]] = Seq.empty[NodeDescriptor[N]],
namedEdgeDescriptors: Iterable[GenEdgeDescriptor[N]] = Seq.empty[GenEdgeDescriptor[N]],
val sectionIds: SectionId = DefaultSectionId)
{
def requireUniqueTypeIds(descriptors: Iterable[TypeId]) {
    // Compare adjacent typeIds of the passed descriptors after sorting; any equal pair is a duplicate.
    def duplicateTypeId =
      descriptors.map(_.typeId).toList.sorted sliding 2 find
      (strings => if (strings.size == 2) strings.head == strings.tail.head else false)
    val duplTypeId = duplicateTypeId
    require(duplTypeId.isEmpty, "Duplicate typeId found: " + duplTypeId.get.head)
}
requireUniqueTypeIds(namedNodeDescriptors)
requireUniqueTypeIds(namedEdgeDescriptors)
protected val nodeDescriptors = Seq(defaultNodeDescriptor) ++ namedNodeDescriptors
protected val edgeDescriptors = Seq(defaultEdgeDescriptor) ++ namedEdgeDescriptors
def nodeDescriptor(typeId: String): Option[NodeDescriptor[N]] =
if (typeId == defaultId ||
typeId == defaultNodeDescriptor.typeId)
Some(defaultNodeDescriptor)
else
namedNodeDescriptors find (_.typeId == typeId)
def edgeDescriptor(typeId: String): Option[GenEdgeDescriptor[N]] =
if (typeId == defaultId ||
typeId == defaultEdgeDescriptor.typeId)
Some(defaultEdgeDescriptor)
else
namedEdgeDescriptors find (_.typeId == typeId)
protected lazy val nodeDescriptorsByManifest: Map[ClassTag[_],NodeDescriptor[N]] = {
val ret = collection.mutable.Map.empty[ClassTag[_], NodeDescriptor[N]]
for (descr <- nodeDescriptors;
manifests = descr.manifests;
m <- manifests)
ret += (m -> descr)
ret.toMap
}
protected final def classManifest(any: Any): ClassTag[_] =
ClassTag( any match {
case r: AnyRef => r.getClass
case v => v.asInstanceOf[AnyRef].getClass
})
protected var lastNodeDescriptor: (Class[_], NodeDescriptor[N]) = (classOf[Null], null)
def nodeDescriptor(node: N): NodeDescriptor[N] = {
val clazz = node match {
case r: AnyRef => Some(r.getClass)
case _ => None
}
if (clazz.filter(_ == lastNodeDescriptor._1 ).isDefined)
lastNodeDescriptor._2
else {
      val nodeManifest = classManifest(node)
      val descr =
        nodeDescriptorsByManifest.find(_._1.runtimeClass == nodeManifest.runtimeClass).
          flatMap(kv => Some(kv._2)) getOrElse (
          // fall back to a descriptor registered for a supertype of the node's runtime class
          (nodeDescriptorsByManifest find (classTag => classTag._1.runtimeClass.isAssignableFrom(nodeManifest.runtimeClass))).
          flatMap(kv => Some(kv._2)) getOrElse (
          throw err(NoNodeDescr,
            clazz flatMap (c => Some(c.getName)) getOrElse ("AnyVal"))))
if (clazz.isDefined)
lastNodeDescriptor = (clazz.get, descr)
descr
}
}
def edgeDescriptor(clazz: Class[_]): GenEdgeDescriptor[N] = {
val className = clazz.getName
val classNameLength = className.length
edgeDescriptors find { d =>
val dClassName = d.edgeManifest.runtimeClass.getName
val dClassNameLength = dClassName.length
dClassName == (if (dClassNameLength < classNameLength)
className substring (0, dClassNameLength)
else
className)
} getOrElse (
throw err(NoEdgeDescr, clazz.getName))
}
}
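/* Usage sketch (editorial illustration): a minimal descriptor for a graph with String nodes
 * and plain undirected edges might look like the following. `StringNodeDescriptor` and
 * `UnDi.descriptor` are assumed to come from this library's predefined descriptors; they are
 * assumptions here, not definitions from this file.
 *
 *   import scalax.collection.io.json.descriptor.predefined.UnDi
 *   val descriptor = new Descriptor[String](
 *     defaultNodeDescriptor = StringNodeDescriptor,
 *     defaultEdgeDescriptor = UnDi.descriptor[String]()
 *   )
 */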
| Calavoow/scala-graph | json/src/main/scala/scalax/collection/io/json/descriptor/Descriptor.scala | Scala | bsd-3-clause | 5,701 |