Dataset columns:

  code        stringlengths   5 .. 1M
  repo_name   stringlengths   5 .. 109
  path        stringlengths   6 .. 208
  language    stringclasses   1 value
  license     stringclasses   15 values
  size        int64           5 .. 1M
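Read together, these columns say that each record below is a single Scala source file plus its repository metadata. As an illustrative sketch only (the case class and field names are assumptions, not part of the dataset schema), one record could be modelled like this in Scala:

// Illustrative model of one dataset record; names and types are assumed from the column listing above.
final case class CodeSample(
  code: String,      // full source file contents (string length 5 .. 1M)
  repoName: String,  // e.g. "guardian/mobile-n10n" (taken from the first record below)
  path: String,      // file path within the repository
  language: String,  // a single class: "Scala"
  license: String,   // one of 15 license identifiers, e.g. "apache-2.0"
  size: Long         // file size, 5 .. 1M
)

// Example instance built from the first record's metadata shown below.
val firstRecord = CodeSample(
  code = "package registration.auditor ...", // truncated here for brevity
  repoName = "guardian/mobile-n10n",
  path = "registration/app/registration/auditor/LiveBlogAuditor.scala",
  language = "Scala",
  license = "apache-2.0",
  size = 1092L
)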
package registration.auditor

import models.{Topic, TopicTypes}
import play.api.libs.ws.WSClient

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

case class LiveblogAuditor(wsClient: WSClient, config: AuditorApiConfig) extends Auditor {

  override def expiredTopics(topics: Set[Topic])(implicit ec: ExecutionContext): Future[Set[Topic]] = {
    val contentTopics = topics.filter(_.`type` == TopicTypes.Content)
    Future.traverse(contentTopics) { topic =>
      isLiveBloggingNow(topic.name).map {
        case true => None
        case false => Some(topic)
      }
    }.map(_.flatten)
  }

  def isLiveBloggingNow(id: String)(implicit ec: ExecutionContext): Future[Boolean] = {
    val url = s"${config.url}/$id?show-fields=liveBloggingNow&api-key=${config.apiKey}"
    wsClient.url(url).get().map { response =>
      val liveBloggingNowField = response.json \ "response" \ "content" \ "fields" \ "liveBloggingNow"
      liveBloggingNowField.validate[String].asOpt.flatMap(s => Try(s.toBoolean).toOption).getOrElse(false)
    } recover {
      case _ => true
    }
  }
}
guardian/mobile-n10n
registration/app/registration/auditor/LiveBlogAuditor.scala
Scala
apache-2.0
1,092
package io.finch import java.util.UUID import cats.Eval import com.twitter.finagle.http.Method import com.twitter.util.{Base64StringEncoder, Future} import io.finch.internal.TooFastString import shapeless._ /** * A collection of [[Endpoint]] combinators. */ trait Endpoints { private[this] val hnilFutureOutput: Eval[Future[Output[HNil]]] = Eval.now(Future.value(Output.payload(HNil))) type Endpoint0 = Endpoint[HNil] type Endpoint2[A, B] = Endpoint[A :: B :: HNil] type Endpoint3[A, B, C] = Endpoint[A :: B :: C :: HNil] /** * An universal [[Endpoint]] that matches the given string. */ private[finch] class Matcher(s: String) extends Endpoint[HNil] { def apply(input: Input): Endpoint.Result[HNil] = input.headOption.flatMap { case `s` => Some((input.drop(1), hnilFutureOutput)) case _ => None } override def toString = s } implicit def stringToMatcher(s: String): Endpoint0 = new Matcher(s) implicit def intToMatcher(i: Int): Endpoint0 = new Matcher(i.toString) implicit def booleanToMatcher(b: Boolean): Endpoint0 = new Matcher(b.toString) /** * An universal extractor that extracts some value of type `A` if it's possible to fetch the value from the string. */ case class Extractor[A](name: String, f: String => Option[A]) extends Endpoint[A] { def apply(input: Input): Endpoint.Result[A] = for { ss <- input.headOption aa <- f(ss) } yield (input.drop(1), Eval.now(Future.value(Output.payload(aa)))) def apply(n: String): Endpoint[A] = copy[A](name = n) override def toString: String = s":$name" } private[finch] case class StringExtractor(name: String) extends Endpoint[String] { def apply(input: Input): Endpoint.Result[String] = input.headOption.map(s => (input.drop(1), Eval.now(Future.value(Output.payload(s))))) def apply(n: String): Endpoint[String] = copy(name = n) override def toString: String = s":$name" } /** * An extractor that extracts a value of type `Seq[A]` from the tail of the route. */ case class TailExtractor[A](name: String, f: String => Option[A]) extends Endpoint[Seq[A]] { def apply(input: Input): Endpoint.Result[Seq[A]] = Some((input.copy(path = Nil), Eval.now(Future.value(Output.payload(for { s <- input.path a <- f(s) } yield a))))) def apply(n: String): Endpoint[Seq[A]] = copy[A](name = n) override def toString: String = s":$name*" } private[this] def extractUUID(s: String): Option[UUID] = if (s.length != 36) None else try Some(UUID.fromString(s)) catch { case _: Exception => None } /** * An [[Endpoint]] that extract an integer value from the route. */ object int extends Extractor("int", _.tooInt) /** * An [[Endpoint]] that extract an integer tail from the route. */ object ints extends TailExtractor("int", _.tooInt) /** * An [[Endpoint]] that extract a long value from the route. */ object long extends Extractor("long", _.tooLong) /** * An [[Endpoint]] that extract a long tail from the route. */ object longs extends TailExtractor("long", _.tooLong) /** * An [[Endpoint]] that extract a string value from the route. */ object string extends StringExtractor("string") /** * An [[Endpoint]] that extract a string tail from the route. */ object strings extends TailExtractor("string", s => Some(s)) /** * An [[Endpoint]] that extract a boolean value from the route. */ object boolean extends Extractor("boolean", _.tooBoolean) /** * An [[Endpoint]] that extract a boolean tail from the route. */ object booleans extends TailExtractor("boolean", _.tooBoolean) /** * An [[Endpoint]] that extract an UUID value from the route. 
*/ object uuid extends Extractor("uuid", extractUUID) /** * An [[Endpoint]] that extract an UUID tail from the route. */ object uuids extends TailExtractor("uuid", extractUUID) /** * An [[Endpoint]] that skips all path parts. */ object * extends Endpoint[HNil] { def apply(input: Input): Endpoint.Result[HNil] = Some((input.copy(path = Nil), hnilFutureOutput)) override def toString: String = "*" } /** * An identity [[Endpoint]]. */ object / extends Endpoint[HNil] { def apply(input: Input): Endpoint.Result[HNil] = Some((input, hnilFutureOutput)) override def toString: String = "" } private[this] def method[A](m: Method)(r: Endpoint[A]): Endpoint[A] = new Endpoint[A] { def apply(input: Input): Endpoint.Result[A] = if (input.request.method == m) r(input) else None override def toString: String = s"${m.toString().toUpperCase} /${r.toString}" } /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `GET` and the underlying router succeeds on it. */ def get[A]: Endpoint[A] => Endpoint[A] = method(Method.Get) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `POST` and the underlying router succeeds on it. */ def post[A]: Endpoint[A] => Endpoint[A] = method(Method.Post) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `PATCH` and the underlying router succeeds on it. */ def patch[A]: Endpoint[A] => Endpoint[A] = method(Method.Patch) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `DELETE` and the underlying router succeeds on it. */ def delete[A]: Endpoint[A] => Endpoint[A] = method(Method.Delete) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `HEAD` and the underlying router succeeds on it. */ def head[A]: Endpoint[A] => Endpoint[A] = method(Method.Head) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `OPTIONS` and the underlying router succeeds on it. */ def options[A]: Endpoint[A] => Endpoint[A] = method(Method.Options) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `PUT` and the underlying router succeeds on it. */ def put[A]: Endpoint[A] => Endpoint[A] = method(Method.Put) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `CONNECT` and the underlying router succeeds on it. */ def connect[A]: Endpoint[A] => Endpoint[A] = method(Method.Connect) /** * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The resulting [[Endpoint]] * succeeds on the request only if its method is `TRACE` and the underlying router succeeds on it. */ def trace[A]: Endpoint[A] => Endpoint[A] = method(Method.Trace) /** * An [[Exception]] representing a failed authorization with [[BasicAuth]]. 
*/ object BasicAuthFailed extends Exception { override def getMessage: String = "Wrong credentials" } /** * Maintains Basic HTTP Auth for an arbitrary [[Endpoint]]. */ case class BasicAuth(user: String, password: String) { private[this] val userInfo = s"$user:$password" private[this] val expected = "Basic " + Base64StringEncoder.encode(userInfo.getBytes) def apply[A](e: Endpoint[A]): Endpoint[A] = new Endpoint[A] { private[this] val failedOutput: Eval[Future[Output[A]]] = Eval.now(Future.value(Unauthorized(BasicAuthFailed))) def apply(input: Input): Endpoint.Result[A] = input.request.authorization.flatMap { case `expected` => e(input) case _ => Some((input.copy(path = Seq.empty), failedOutput)) } override def toString: String = s"BasicAuth($e)" } } }
BenWhitehead/finch
core/src/main/scala/io/finch/Endpoints.scala
Scala
apache-2.0
8,379
package org.broadinstitute.dsde.firecloud.page.methodrepo

import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.component._
import org.broadinstitute.dsde.firecloud.component.Component._
import org.broadinstitute.dsde.firecloud.page.methodcommon.SelectConfigurationView
import org.broadinstitute.dsde.firecloud.page.workspaces.methodconfigs.WorkspaceMethodConfigDetailsPage
import org.broadinstitute.dsde.firecloud.page.{BaseFireCloudPage, PageUtil}
import org.openqa.selenium.WebDriver

class MethodDetailPage(namespace: String, name: String)(implicit webDriver: WebDriver)
  extends BaseFireCloudPage with PageUtil[MethodDetailPage] {

  lazy override val url = s"${FireCloudConfig.FireCloud.baseUrl}#methods/$namespace/$name/${snapshotVersion}"

  private val exportButton = Button("export-to-workspace-button")
  private val redactButton = Button("redact-button")

  override def awaitReady(): Unit = {
    super.awaitReady()
    redactButton.awaitVisible()
  }

  def snapshotVersion: String = {
    val ver = CssSelectorQuery(s"[data-test-id=snapshot-dropdown] > span").element.underlying.getText
    ver
  }

  def startExport(): ExportModal = {
    exportButton.doClick()
    await ready new ExportModal(namespace, name)
  }

  def redact(): Unit = {
    redactButton.doClick()
    new OKCancelModal("confirm-redaction-modal").clickOk()
    // redact takes us back to the table:
    redactButton.awaitNotVisible()
  }
}

class ExportModal(methodNamespace: String, methodName: String)(implicit webDriver: WebDriver)
  extends Modal("export-config-to-workspace-modal") {

  val firstPage = new SelectConfigurationView(importing = false)

  def getPostExportModal: PostExportModal = await ready new PostExportModal(methodNamespace, methodName)
}

class PostExportModal(methodNamespace: String, methodName: String)(implicit webDriver: WebDriver)
  extends OKCancelModal("export-successful-modal") {

  def goToWorkspace(project: String, wsName: String): WorkspaceMethodConfigDetailsPage = {
    submit()
    await ready new WorkspaceMethodConfigDetailsPage(project, wsName, methodNamespace, methodName)
  }

  def stayHere(): Unit = {
    cancel()
  }
}
broadinstitute/firecloud-ui
automation/src/test/scala/org/broadinstitute/dsde/firecloud/page/methodrepo/MethodDetailPage.scala
Scala
bsd-3-clause
2,199
package code
package config

import net.liftweb._
import common._
import http._
import json._
import mongodb._
import util.Props

import com.mongodb.{DBAddress, MongoClient}

object MongoConfig extends Factory with Loggable {
  // configure your MongoMetaRecords to use this. See lib/RogueMetaRecord.scala.
  val defaultId = new FactoryMaker[MongoIdentifier](DefaultMongoIdentifier) {}

  def init() {
    /**
     * First checks for existence of mongo.default.url. If not found, then
     * checks for mongo.default.host, port, and name. Uses defaults if those
     * are not found.
     */
    val defaultDbAddress = Props.get("mongo.default.url")
      .map(url => new DBAddress(url))
      .openOr(new DBAddress(
        Props.get("mongo.default.host", "127.0.0.1"),
        Props.getInt("mongo.default.port", 27017),
        Props.get("mongo.default.name", "riego")
      ))

    /*
     * If mongo.default.user, and pwd are defined, configure Mongo using authentication.
     */
    (Props.get("mongo.default.user"), Props.get("mongo.default.pwd")) match {
      case (Full(user), Full(pwd)) =>
        MongoDB.defineDbAuth(
          DefaultMongoIdentifier,
          new MongoClient(defaultDbAddress),
          defaultDbAddress.getDBName,
          user,
          pwd
        )
        logger.info("MongoDB inited using authentication: %s".format(defaultDbAddress.toString))
      case _ =>
        MongoDB.defineDb(
          DefaultMongoIdentifier,
          new MongoClient(defaultDbAddress),
          defaultDbAddress.getDBName
        )
        logger.info("MongoDB inited: %s".format(defaultDbAddress.toString))
    }
  }
}
jgenso/riego
src/main/scala/code/config/MongoConfig.scala
Scala
apache-2.0
1,647
package com.github.jeanadrien.gatling.mqtt.client

import com.github.jeanadrien.gatling.mqtt.client.MqttQoS.MqttQoS
import com.github.jeanadrien.gatling.mqtt.protocol.{MqttProtocolReconnectPart, MqttProtocolSocketPart, MqttProtocolThrottlingPart}
import org.fusesource.mqtt.client.QoS._

/**
 * Concrete configuration after being generated by the Expression
 */
case class MqttClientConfiguration(
    host             : String = "localhost",
    clientId         : Option[String] = None,
    cleanSession     : Boolean = true,
    username         : Option[String] = None,
    password         : Option[String] = None,
    will             : Option[Will] = None,
    keepAlive        : Int = 30, // seconds
    version          : Option[String] = None, // default 3.1
    reconnectConfig  : MqttProtocolReconnectPart = MqttProtocolReconnectPart(),
    socketConfig     : MqttProtocolSocketPart = MqttProtocolSocketPart(),
    throttlingConfig : MqttProtocolThrottlingPart = MqttProtocolThrottlingPart()
)

case class Will(
    topic      : String,
    message    : String, // Array[Byte],
    qos        : MqttQoS = MqttQoS.AtMostOnce,
    willRetain : Boolean = false // TODO : Check default value
)
jeanadrien/gatling-mqtt-protocol
src/main/scala/com/github/jeanadrien/gatling/mqtt/client/MqttClientConfiguration.scala
Scala
apache-2.0
1,188
package chapter.five

object ExerciseNine extends App {

}
deekim/impatient-scala
src/main/scala/chapter/five/ExerciseNine.scala
Scala
apache-2.0
60
package fruits

object Apple1 {
  def printMe(): Unit = println("Apple!")
}

class Apple2 {
  def printMe(): Unit = println("Pear")
}
arcyfelix/Courses
18-10-18-Programming-in-Scala-by-Martin-Odersky-Lex-Spoon-and-Bill-Venners/37-Imports/src/fruits/Apple.scala
Scala
apache-2.0
131
/*
 * Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
 * Licensed under the Apache License, Version 2.0 (see LICENSE).
 */
package org.maproulette.framework.model

import org.joda.time.DateTime
import org.maproulette.cache.CacheObject
import org.maproulette.data.{ItemType}
import org.maproulette.framework.psql.CommonField
import org.maproulette.framework.model.Identifiable
import play.api.libs.json.{Json, Reads, Writes}
import play.api.libs.json.JodaWrites._
import play.api.libs.json.JodaReads._

/**
 * The project object is the root object of hierarchy, it is built to allow users to have personal
 * domains where they can create their own challenges and have a permissions model that allows
 * users to have and give control over what happens within that domain.
 *
 * @author cuthbertm
 */
case class Project(
    override val id: Long,
    owner: Long,
    override val name: String,
    created: DateTime = DateTime.now(),
    modified: DateTime = DateTime.now(),
    description: Option[String] = None,
    grants: List[Grant] = List.empty,
    enabled: Boolean = false,
    displayName: Option[String] = None,
    deleted: Boolean = false,
    isVirtual: Option[Boolean] = Some(false),
    featured: Boolean = false,
    isArchived: Boolean = false
) extends CacheObject[Long]
    with Identifiable {
  def grantsToType(granteeType: ItemType) =
    this.grants.filter(_.grantee.granteeType == granteeType)
}

object Project extends CommonField {
  implicit val grantWrites: Writes[Grant] = Grant.writes
  implicit val grantReads: Reads[Grant] = Grant.reads
  implicit val writes: Writes[Project] = Json.writes[Project]
  implicit val reads: Reads[Project] = Json.reads[Project]

  val TABLE = "projects"
  val KEY_GRANTS = "grants"

  val FIELD_OWNER = "owner_id"
  val FIELD_ENABLED = "enabled"
  val FIELD_DISPLAY_NAME = "display_name"
  val FIELD_DELETED = "deleted"
  val FIELD_VIRTUAL = "is_virtual"
  val FIELD_FEATURED = "featured"
  val FIELD_IS_ARCHIVED = "is_archived"

  def emptyProject: Project =
    Project(-1, User.DEFAULT_SUPER_USER_ID, "", DateTime.now(), DateTime.now())
}
mgcuthbert/maproulette2
app/org/maproulette/framework/model/Project.scala
Scala
apache-2.0
2,188
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2015 Cayde Dixon
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package net.cazzar.mods.jukeboxreloaded.network.message

import io.netty.buffer.ByteBuf
import net.cazzar.mods.jukeboxreloaded.JukeboxReloaded
import net.cazzar.mods.jukeboxreloaded.Util._
import net.cazzar.mods.jukeboxreloaded.blocks.tileentity.TileJukebox
import net.minecraft.util.BlockPos
import net.minecraftforge.fml.common.network.simpleimpl.{IMessage, IMessageHandler, MessageContext}

class ServerActionMessage(var action: Action.Value, var pos: BlockPos) extends IMessage {
  var currentRecord = 0

  def this() = this(null, null)

  override def fromBytes(buf: ByteBuf): Unit = {
    pos = buf.readPos()
    action = Action(buf.readByte())
  }

  override def toBytes(buf: ByteBuf): Unit = {
    buf.writePos(pos)
    buf.writeByte(action.id)
  }
}

object ServerActionMessage {
  class Handler extends IMessageHandler[ServerActionMessage, IMessage] {
    override def onMessage(message: ServerActionMessage, ctx: MessageContext): IMessage = {
      val tile = JukeboxReloaded.proxy.getWorld.get.getTile[TileJukebox](message.pos)
      message.action match {
        case Action.PLAY =>
          tile.foreach(r => {
            r.record = message.currentRecord
            r.playRecord(fromServer = true)
          })
        case Action.STOP => tile.foreach(_.stopPlayingRecord(serious = true))
        case Action.NEXT => tile.foreach(_.nextRecord())
        case Action.PREVIOUS => tile.foreach(_.prevRecord())
        case Action.SHUFFLE_OFF => tile.foreach(_.shuffle = true)
        case Action.SHUFFLE_ON => tile.foreach(_.shuffle = false)
        case Action.REPEAT_ALL => tile.foreach(_.repeatMode = TileJukebox.RepeatMode.ALL)
        case Action.REPEAT_NONE => tile.foreach(_.repeatMode = TileJukebox.RepeatMode.NONE)
        case Action.REPEAT_ONE => tile.foreach(_.repeatMode = TileJukebox.RepeatMode.ONE)
      }
      null
    }
  }
}
cazzar/JukeboxReloaded
src/main/scala/net/cazzar/mods/jukeboxreloaded/network/message/ServerActionMessage.scala
Scala
mit
2,994
/*
 * Copyright (c) 2017 Xavier Defago (Tokyo Institute of Technology)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ocelot.topology

import ocelot.PID

trait Topology {
  def processSet: Set[PID]
  def processes: Iterator[PID]
  def edges: Iterator[Edge]

  def isDirected: Boolean
  def isConnected: Boolean
  def isWeighted: Boolean

  def numberOfEdges: Int
  def numberOfVertices: Int

  def neighborsOf(process: PID): Option[Set[PID]]
  def contains(process: PID): Boolean

  def isEmpty = processSet.isEmpty
  def nonEmpty = processSet.nonEmpty
  def size: Int = numberOfVertices

  def union(that: Topology): Topology = Union(this, that)
  def diff(that: Topology): Topology =
    if (that.processSet.intersect(this.processSet).isEmpty) Topology.empty else ???
  def intersect(that: Topology): Topology = ??? // if (that.processSet.intersect(this.processSet).isEmpty) this else ???
}

object Topology {
  def empty: Topology = Empty

  case object Empty extends Topology {
    def processSet: Set[PID] = Set.empty
    def processes: Iterator[PID] = Iterator.empty
    def edges: Iterator[Edge] = Iterator.empty

    def isDirected: Boolean = false
    def isConnected: Boolean = true
    def isWeighted: Boolean = false

    def numberOfEdges: Int = 0
    def numberOfVertices: Int = 0

    def neighborsOf(process: PID): Option[Set[PID]] = None
    def contains(process: PID): Boolean = false

    override def union(that: Topology): Topology = that
    override def diff(that: Topology): Topology = this
    override def intersect(that: Topology): Topology = this
  }
}
xdefago/ocelot
src/main/scala/ocelot/topology/Topology.scala
Scala
apache-2.0
2,155
/*
 * Copyright (C) 2014-2015 by Nokia.
 * See the LICENCE.txt file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package wookie.spark.cli

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SQLImplicits, SparkSession}
import org.apache.spark.streaming.{StreamingContext, StreamingContextState}
import org.junit.runner.RunWith
import org.rogach.scallop.ScallopConf
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import wookie.app.{CheckpointConf, DurationConf, NameConf}

import scala.collection.mutable
import scalaz.concurrent.Task

@RunWith(classOf[JUnitRunner])
class SparkStreamingSpec extends Specification {

  "Should init Spark Streaming object" in {
    var localSc: SparkContext = null
    var localSQL: SparkSession = null
    var localStreaming: StreamingContext = null
    var appName: String = null
    var duration: Long = 0L

    val app = new SparkStreamingApp(new ScallopConf(_) with NameConf with DurationConf with CheckpointConf) {
      override def runStreaming(opt: ScallopConf with NameConf with DurationConf with CheckpointConf,
                                spark: SparkSession, ssc: StreamingContext): Unit = {
        localSc = spark.sparkContext
        localSQL = spark
        localStreaming = ssc
        appName = opt.name()
        duration = opt.duration()
        val rdd = spark.sparkContext.parallelize(Seq(100, 200, 300))
        val queue = new mutable.Queue[RDD[Int]]()
        queue.enqueue(rdd)
        ssc.queueStream(queue).print()
      }
    }

    System.setProperty("spark.master", "local")

    val sparkStop = Task.fork(Task.delay {
      val time1 = System.currentTimeMillis()
      while (localStreaming == null || localStreaming.getState() != StreamingContextState.ACTIVE) {
        Thread.sleep(300)
      }
      localStreaming.stop()
    })
    sparkStop.unsafePerformAsync(out => ())

    app.main(Array("--name", "xxx", "--duration", "1000"))
    localSc.stop()

    Option(localSc) must beSome
    Option(localSQL) must beSome
    Option(localStreaming) must beSome
    appName must_== "xxx"
    duration must_== 1000L
  }
}
elyast/wookie
spark-api/src/test/scala/wookie/spark/cli/SparkStreamingSpec.scala
Scala
apache-2.0
2,735
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql

import org.apache.commons.math3.stat.inference.ChiSquareTest

import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession

class ConfigBehaviorSuite extends QueryTest with SharedSparkSession {

  import testImplicits._

  test("SPARK-22160 spark.sql.execution.rangeExchange.sampleSizePerPartition") {
    // In this test, we run a sort and compute the histogram for partition size post shuffle.
    // With a high sample count, the partition size should be more evenly distributed, and has a
    // low chi-sq test value.
    // Also the whole code path for range partitioning as implemented should be deterministic
    // (it uses the partition id as the seed), so this test shouldn't be flaky.

    val numPartitions = 4

    def computeChiSquareTest(): Double = {
      val n = 10000
      // Trigger a sort
      // Range has range partitioning in its output now. To have a range shuffle, we
      // need to run a repartition first.
      val data = spark.range(0, n, 1, 1).repartition(10).sort('id.desc)
        .selectExpr("SPARK_PARTITION_ID() pid", "id").as[(Int, Long)].collect()

      // Compute histogram for the number of records per partition post sort
      val dist = data.groupBy(_._1).map(_._2.length.toLong).toArray
      assert(dist.length == 4)

      new ChiSquareTest().chiSquare(
        Array.fill(numPartitions) { n.toDouble / numPartitions },
        dist)
    }

    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> numPartitions.toString) {
      // The default chi-sq value should be low
      assert(computeChiSquareTest() < 100)

      withSQLConf(SQLConf.RANGE_EXCHANGE_SAMPLE_SIZE_PER_PARTITION.key -> "1") {
        // If we only sample one point, the range boundaries will be pretty bad and the
        // chi-sq value would be very high.
        assert(computeChiSquareTest() > 300)
      }
    }
  }
}
pgandhi999/spark
sql/core/src/test/scala/org/apache/spark/sql/ConfigBehaviorSuite.scala
Scala
apache-2.0
2,709
/* * Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.wegtam.tensei.agent.parsers import java.nio.charset.Charset import java.time.{ OffsetDateTime, ZoneOffset } import javax.xml.xpath.{ XPath, XPathConstants, XPathFactory } import akka.actor.{ Actor, ActorLogging, ActorRef, Props } import akka.event.{ DiagnosticLoggingAdapter, Logging } import com.google.common.base.Charsets import com.wegtam.tensei.adt.{ ConnectionInformation, Cookbook, DFASDLReference } import com.wegtam.tensei.agent.DataTreeDocument.DataTreeDocumentMessages import com.wegtam.tensei.agent.adt._ import com.wegtam.tensei.agent.helpers.{ DatabaseHelpers, LoggingHelpers } import com.wegtam.tensei.agent.parsers.DatabaseParser.DatabaseParserCursorState import org.dfasdl.utils.{ AttributeNames, DataElementType, ElementNames, StructureElementType } import org.w3c.dom.{ Element, NodeList } import scala.collection.mutable import scalaz.Scalaz._ import scalaz._ object DatabaseParser { /** * Helper method to create an actor for database parsing. * * @param source The source connection to retrieve the data from. * @param cookbook The cookbook holding the source dfasdl. * @param dataTreeRef The actor ref to the data tree e.g. where to put the parsed data. * @param agentRunIdentifier An optional agent run identifier which is usually an uuid. * @return The props to generate the actor. */ def props(source: ConnectionInformation, cookbook: Cookbook, dataTreeRef: ActorRef, agentRunIdentifier: Option[String]): Props = Props(new DatabaseParser(source, cookbook, dataTreeRef, agentRunIdentifier)) /** * A sealed trait for the different states of a cursor. */ sealed trait DatabaseParserCursorState object DatabaseParserCursorState { final case class Active(cursor: java.sql.ResultSet) extends DatabaseParserCursorState case object Done extends DatabaseParserCursorState case object Uninitialized extends DatabaseParserCursorState } } /** * A simple database parser. * * @param source The source connection to retrieve the data from. * @param cookbook The cookbook holding the source dfasdl. * @param dataTreeRef The actor ref to the data tree e.g. where to put the parsed data. * @param agentRunIdentifier An optional agent run identifier which is usually an uuid. */ class DatabaseParser(source: ConnectionInformation, cookbook: Cookbook, dataTreeRef: ActorRef, agentRunIdentifier: Option[String]) extends Actor with ActorLogging with BaseParser with DatabaseHelpers { override val log : DiagnosticLoggingAdapter = Logging(this) // Override the standard logger to be able to add stuff via MDC. 
log.mdc(LoggingHelpers.generateMdcEntryForRunIdentifier(agentRunIdentifier)) @throws[Exception](classOf[Exception]) override def postStop(): Unit = { super.postStop() connection match { case -\\/(failure) => log.error(failure, "Could not close database connection!") case \\/-(success) => success.close() } log.clearMDC() } // Try to establish a database connection. val connection = connect(source) val statement: Option[java.sql.Statement] = connection match { case -\\/(failure) => log.error(failure, "Could not create statement from db connection!") None case \\/-(success) => val stm = success.createStatement() log.debug("Created statement from db connection: {}", stm) Option(stm) } // Holds the database cursors for the sequences (e.g. the tables). val cursors: mutable.Map[String, DatabaseParserCursorState] = initializeCursorMap(cookbook, source.dfasdlRef.get) var defaultEncoding: Charset = Charsets.UTF_8 override def receive: Receive = { case BaseParserMessages.SubParserInitialize => sender() ! BaseParserMessages.SubParserInitialized case BaseParserMessages.Start => log.debug("Starting database parser.") parseDb() sender() ! ParserStatusMessage(ParserStatus.COMPLETED, Option(context.self)) case BaseParserMessages.Stop => log.debug("Stopping database parser.") context stop self case BaseParserMessages.Status => log.error("Status request not yet implemented!") } /** * If we reach a sequence and there exists no database cursor then we create one using a generated query * and store it in the cursors map. * * @param e The element discovered. */ override def parserStructuralElementHandler(e: Element): Unit = if (StructureElementType.isSequence(getStructureElementType(e.getNodeName))) { val id = e.getAttribute("id") if (cursors .get(id) .isDefined && cursors(id) == DatabaseParserCursorState.Uninitialized && statement.isDefined) { generateSqlSelect(e) match { case Success(query) => log.debug("Generated SQL for sequence {}: {}", id, query) val results = statement.get.executeQuery(query) if (results.next()) cursors.update(id, DatabaseParserCursorState.Active(results)) else cursors.update(id, DatabaseParserCursorState.Done) // The query returned no results, therefore we are done with it. case Failure(errors) => log.error("SQL query could not be created: {}", errors.toList.mkString) } } } /** * We try to move the database cursor to next row. If the cursors returns `false` there is no more data * and we remove it from the map. * * @param s The element that describes the sequence. */ override def parserFinishSequenceRowHandler(s: Element): Unit = { val id = s.getAttribute("id") if (cursors.get(id).isDefined) { log.debug("Checking cursor for sequence {}.", id) cursors(id) match { case DatabaseParserCursorState.Active(cursor) => if (!cursor.next()) { log.debug("No more data from cursor after {} rows.", state.getSequenceRowCount(id)) cursors.update(id, DatabaseParserCursorState.Done) log.info("Table '{}' has '{}' rows.", id, state.getSequenceRowCount(id)) // FIXME Logging for internal information (Protokollierung) val _ = state.resetSequenceRowCount(id) } case DatabaseParserCursorState.Done => log.debug("Cursor already done.") case DatabaseParserCursorState.Uninitialized => log.warning("Cursor for sequence {} is uninitialized! This should never happen!", id) } } } override def save(data: ParserDataContainer, dataHash: Long, referenceId: Option[String] = None): Unit = { val sourceSequenceRow = if (state.isInSequence) Option(state.getCurrentSequenceRowCount) else None // Is the sequence `Done`? 
-> We do not have to save anymore! val sequenceDone: Boolean = if (state.isInSequence) cursors(state.getCurrentSequence.asInstanceOf[Element].getAttribute("id")) match { case DatabaseParserCursorState.Done => true case _ => false } else false if (!sequenceDone) { if (referenceId.isDefined) { dataTreeRef ! DataTreeDocumentMessages.SaveReferenceData(data, dataHash, referenceId.get, sourceSequenceRow) } else dataTreeRef ! DataTreeDocumentMessages.SaveData(data, dataHash) } } override def readDataElement(structureElement: Element, useOffset: Long, isInChoice: Boolean): BaseParserResponse = { val parentSequence = getParentSequence(structureElement) val results: Option[java.sql.ResultSet] = if (parentSequence.isDefined && cursors.get(parentSequence.get.getAttribute("id")).isDefined) cursors(parentSequence.get.getAttribute("id")) match { case DatabaseParserCursorState.Active(cursor) => Option(cursor) case DatabaseParserCursorState.Done => log.debug("Database cursor already done for {} in {}.", structureElement.getAttribute("id"), parentSequence.get.getAttribute("id")) None case DatabaseParserCursorState.Uninitialized => log.warning("Database cursor uninitialized for {} in {}!", structureElement.getAttribute("id"), parentSequence.get.getAttribute("id")) None } else None val response = if (results.isDefined) { // Get encoding or set it to default. val encoding = if (structureElement.hasAttribute(AttributeNames.ENCODING)) { Charset.forName(structureElement.getAttribute(AttributeNames.ENCODING)) } else defaultEncoding val format: String = if (structureElement.hasAttribute(AttributeNames.FORMAT)) structureElement.getAttribute(AttributeNames.FORMAT) else "" val columnName = if (structureElement.hasAttribute(AttributeNames.DB_COLUMN_NAME)) structureElement.getAttribute(AttributeNames.DB_COLUMN_NAME) else structureElement.getAttribute("id") /** * If a formatted number has the attributes max-digits and max-precision * set to a value other than "0" then the JDBC driver can use a * `getBigDecimal` operation to receive the value. Otherwise the `getDouble` * method is used. * * @return Returns only `true` if the `getBigDecimal` method can be used to retrieve the element from JDBC. */ def canUseFormatNumAsDecimalExtractor: Boolean = { val digits = structureElement.getAttribute(AttributeNames.MAX_DIGITS) val precision = structureElement.getAttribute(AttributeNames.MAX_PRECISION) digits != null && digits != "0" && precision != null && precision != "0" } val rawStringRepresentation: String = structureElement.getTagName match { case ElementNames.NUMBER => val c = results.get.getLong(columnName).toString // Extra check because "0" is returned if the column is 0 but also if it is NULL. :-( if (c == "0" && results.get.getString(columnName) == null) null else c case ElementNames.FORMATTED_NUMBER => \\/.fromTryCatch { if (canUseFormatNumAsDecimalExtractor) { val c = results.get.getBigDecimal(columnName) if (c != null) c.toPlainString else null } else { val c = results.get.getDouble(columnName).toString // Extra check because "0" is returned if the column is 0 but also if it is NULL. :-( if (c == "0" && results.get.getString(columnName) == null) null else c } } match { case -\\/(failure) => results.get.getString(columnName) // Fall back to string parsing because of errors which might result from fancy things like parsing a varchar column with a formatnum element. 
case \\/-(success) => success } case ElementNames.DATETIME => \\/.fromTryCatch { OffsetDateTime .of(results.get.getTimestamp(columnName).toLocalDateTime, ZoneOffset.UTC) .toString } match { case -\\/(failure) => results.get.getString(columnName) // Fall back to string parsing because of errors which might result from fancy things like parsing a varchar column with a formatnum element. case \\/-(success) => success } case _ => results.get.getString(columnName) } val bytes = if (rawStringRepresentation == null) null else rawStringRepresentation.getBytes(encoding) val data = if (bytes == null) None else { if (format.isEmpty) Option(new String(bytes, encoding)) else { val response: Option[String] = getDataElementType(structureElement.getTagName) match { case DataElementType.BinaryDataElement => Option(new String(bytes, encoding)) case DataElementType.StringDataElement => structureElement.getTagName match { case ElementNames.FORMATTED_STRING | ElementNames.FORMATTED_NUMBER => val tmpString = new String(bytes, encoding) val pattern = s"(?s)$format".r val m = pattern.findFirstMatchIn(tmpString) if (m.isDefined) Option(m.get.group(1)) else { log.warning("Could not apply format of element {}!", structureElement.getAttribute("id")) log.debug("Element input was: {}", rawStringRepresentation) None } case _ => Option(new String(bytes, encoding)) } case DataElementType.UnknownElement => throw new RuntimeException( s"Unknown element ${structureElement.getTagName} (${structureElement.getAttribute("id")})!" ) } response } } log.debug("Parsed element {} with data: >{}<.", structureElement.getAttribute("id"), data) BaseParserResponse(data, DataElementType.StringDataElement) } else { val doneCursors = cursors.count(_._2 == DatabaseParserCursorState.Done) if (doneCursors == cursors.size) BaseParserResponse(data = None, elementType = DataElementType.StringDataElement, status = BaseParserResponseStatus.END_OF_DATA) else BaseParserResponse(data = None, elementType = DataElementType.StringDataElement, status = BaseParserResponseStatus.END_OF_SEQUENCE) } response } private def parseDb(): Unit = if (source.dfasdlRef.isDefined && cookbook.findDFASDL(source.dfasdlRef.get).isDefined) { val xml = createNormalizedDocument(cookbook.findDFASDL(source.dfasdlRef.get).get.content) val root = xml.getDocumentElement if (root.hasAttribute(AttributeNames.DEFAULT_ENCODING)) defaultEncoding = Charset.forName(root.getAttribute(AttributeNames.DEFAULT_ENCODING)) else defaultEncoding = Charsets.UTF_8 traverseTree(xml, log) } else log.error("No DFASDL defined for {} in cookbook {}", source.uri, cookbook.id) /** * Generates the SQL SELECT statement for the given `seq` or `fixseq` element. * * @param e A DFASDL element which must be a `seq` or a `fixseq`. * @return A validation holding the SQL string or an error message. */ private def generateSqlSelect(e: Element): ValidationNel[String, String] = if (isStructuralElement(e.getNodeName) && StructureElementType.isSequence( getStructureElementType(e.getNodeName) )) { if (e.hasAttribute(AttributeNames.DB_SELECT) && e .getAttribute(AttributeNames.DB_SELECT) .length > 0) e.getAttribute(AttributeNames.DB_SELECT).successNel[String] else { // FIXME What do we do if we have stacked sequences?!? 
(see #T-126) val dataElements = getChildDataElementsFromElement(e) val sql = s"SELECT ${dataElements .map( e => if (e.hasAttribute(AttributeNames.DB_COLUMN_NAME)) e.getAttribute(AttributeNames.DB_COLUMN_NAME) else e.getAttribute("id") ) .mkString(",")} FROM ${e.getAttribute("id")}" val sqlWhere = if (e.hasAttribute(AttributeNames.FILTER)) s"$sql WHERE ${e.getAttribute(AttributeNames.FILTER)}" else sql if (getStructureElementType(e.getNodeName) == StructureElementType.FixedSequence) s"$sqlWhere LIMIT ${e.getAttribute(AttributeNames.FIXED_SEQUENCE_COUNT)}}" .successNel[String] else if (e.hasAttribute(AttributeNames.SEQUENCE_MAX)) s"$sqlWhere LIMIT ${e.getAttribute(AttributeNames.SEQUENCE_MAX)}".successNel[String] else sqlWhere.successNel[String] } } else { s"Element ${e.getNodeName} is no sequence!".failNel[String] } /** * Find all sequences within the given DFASDL and prepare a "cursor map" for them. * * @param c A cookbook. * @param r A reference to a DFASDL. * @return A Map that has a key entry for each sequence. */ private def initializeCursorMap( c: Cookbook, r: DFASDLReference ): mutable.Map[String, DatabaseParserCursorState] = { val whitelist = getSourceParentSequences(c, r) c.findDFASDL(r) .map { dfasdl => val cursors = new mutable.HashMap[String, DatabaseParserCursorState]() val xpath: XPath = XPathFactory.newInstance().newXPath() val xml = createNormalizedDocument(dfasdl.content, useSchema = false) // Disable the schema to avoid confusing xpath. val tables = xpath .evaluate( s"/${ElementNames.ROOT}/${ElementNames.SEQUENCE} | /${ElementNames.ROOT}/${ElementNames.FIXED_SEQUENCE}", xml.getDocumentElement, XPathConstants.NODESET ) .asInstanceOf[NodeList] if (tables.getLength == 0) log.warning("No table definitions found in DFASDL {}!", dfasdl.id) else { log.debug("Found {} table definitions in DFASDL {}.", tables.getLength, dfasdl.id) for (count <- 0 until tables.getLength) { val table = tables.item(count).asInstanceOf[Element] val id = table.getAttribute("id") if (whitelist.contains(id)) cursors.update(table.getAttribute("id"), DatabaseParserCursorState.Uninitialized) else { log.debug("Marking unused database table {} as done.", id) cursors.update(table.getAttribute("id"), DatabaseParserCursorState.Done) } } } cursors } .getOrElse(mutable.Map.empty[String, DatabaseParserCursorState]) } }
Tensei-Data/tensei-agent
src/main/scala/com/wegtam/tensei/agent/parsers/DatabaseParser.scala
Scala
agpl-3.0
19,742
package shardakka.keyvalue

import akka.actor.{ ActorRef, Props }
import shardakka.ShardakkaExtension

object SimpleKeyValueRoot {
  def props(name: String): Props = Props(classOf[SimpleKeyValueRoot], name)
}

final class SimpleKeyValueRoot(name: String) extends Root {
  override def persistenceId = ShardakkaExtension.KVPersistencePrefix + "_" + name + "_root"

  protected override def handleCustom: Receive = {
    case cmd @ ValueCommands.Upsert(key, _) ⇒ create[ValueCommands.Ack](key, cmd)
    case cmd @ ValueCommands.Delete(key)    ⇒ delete[ValueCommands.Ack](key, cmd)
    case query: ValueQuery                  ⇒ valueActorOf(query.key) forward query
  }

  protected override def valueActorOf(key: String): ActorRef = {
    context.child(key).getOrElse(context.actorOf(ValueActor.props(name), key))
  }
}
dsaved/africhat-platform-0.1
actor-server/shardakka/src/main/scala/shardakka/keyvalue/SimpleKeyValueRoot.scala
Scala
mit
826
package metaconfig

import scala.language.higherKinds
import scala.collection.compat._
import scala.reflect.ClassTag
import metaconfig.Configured._
import metaconfig.Extractors.Number
import metaconfig.generic.Settings
import metaconfig.internal.CanBuildFromDecoder
import metaconfig.internal.NoTyposDecoder
import java.nio.file.Path
import java.nio.file.Paths

trait ConfDecoder[A] { self =>

  def read(conf: Conf): Configured[A]

  final def read(conf: Configured[Conf]): Configured[A] =
    conf.andThen(self.read)

  final def map[B](f: A => B): ConfDecoder[B] =
    self.flatMap(x => Ok(f(x)))

  final def orElse(other: ConfDecoder[A]): ConfDecoder[A] =
    ConfDecoder.orElse(this, other)

  final def flatMap[TT](f: A => Configured[TT]): ConfDecoder[TT] =
    new ConfDecoder[TT] {
      override def read(any: Conf): Configured[TT] = self.read(any) match {
        case Ok(x) => f(x)
        case NotOk(x) => Configured.NotOk(x)
      }
    }

  /**
   * Fail this decoder on unknown fields.
   *
   * By default, a decoder ignores unknown fields. With .noTypos, the decoder
   * will fail if an object contains unknown fields, which typically hint the
   * user entered a typo in the config file.
   */
  final def noTypos(implicit ev: Settings[A]): ConfDecoder[A] =
    NoTyposDecoder[A](self)

}

object ConfDecoder {

  @deprecated("Use ConfDecoder[T].read instead", "0.6.1")
  def decode[T](conf: Conf)(implicit ev: ConfDecoder[T]): Configured[T] =
    ev.read(conf)

  def apply[T](implicit ev: ConfDecoder[T]): ConfDecoder[T] = ev

  // TODO(olafur) remove in favor of instanceExpect.
  def instance[T](
      f: PartialFunction[Conf, Configured[T]]
  )(implicit ev: ClassTag[T]): ConfDecoder[T] =
    instanceExpect(ev.runtimeClass.getName)(f)

  def instanceF[T](
      f: Conf => Configured[T]
  )(implicit ev: ClassTag[T]): ConfDecoder[T] =
    instance[T] { case x => f(x) }

  def instanceExpect[T](expect: String)(
      f: PartialFunction[Conf, Configured[T]]
  )(implicit ev: ClassTag[T]): ConfDecoder[T] =
    new ConfDecoder[T] {
      override def read(any: Conf): Configured[T] =
        f.applyOrElse(
          any,
          (x: Conf) => {
            NotOk(ConfError.typeMismatch(expect, x))
          }
        )
    }

  def constant[T](value: T): ConfDecoder[T] =
    new ConfDecoder[T] {
      override def read(conf: Conf): Configured[T] = Configured.ok(value)
    }

  implicit val confDecoder: ConfDecoder[Conf] =
    new ConfDecoder[Conf] {
      override def read(conf: Conf): Configured[Conf] = Configured.Ok(conf)
    }

  implicit val intConfDecoder: ConfDecoder[Int] =
    instanceExpect[Int]("Number") {
      case Conf.Num(x) => Ok(x.toInt)
      case Conf.Str(Number(n)) => Ok(n.toInt)
    }

  implicit val bigDecimalConfDecoder: ConfDecoder[BigDecimal] =
    instanceExpect[BigDecimal]("Number") {
      case Conf.Num(x) => Ok(x)
    }

  implicit val stringConfDecoder: ConfDecoder[String] =
    instanceExpect[String]("String") { case Conf.Str(x) => Ok(x) }

  implicit val unitConfDecoder: ConfDecoder[Unit] =
    instanceExpect[Unit]("Unit") { case _ => Ok(()) }

  implicit val booleanConfDecoder: ConfDecoder[Boolean] =
    instanceExpect[Boolean]("Bool") {
      case Conf.Bool(x) => Ok(x)
      case Conf.Str("true" | "on" | "yes") => Ok(true)
      case Conf.Str("false" | "off" | "no") => Ok(false)
    }

  implicit lazy val pathConfDecoder: ConfDecoder[Path] =
    stringConfDecoder.flatMap { path =>
      Configured.fromExceptionThrowing(Paths.get(path))
    }

  implicit def canBuildFromOption[A](
      implicit ev: ConfDecoder[A],
      classTag: ClassTag[A]
  ): ConfDecoder[Option[A]] =
    new ConfDecoder[Option[A]] {
      override def read(conf: Conf): Configured[Option[A]] = conf match {
        case Conf.Null() => Configured.ok(None)
        case _ => ev.read(conf).map(Some(_))
      }
    }

  implicit def canBuildFromMapWithStringKey[A](
      implicit ev: ConfDecoder[A],
      classTag: ClassTag[A]
  ): ConfDecoder[Map[String, A]] =
    CanBuildFromDecoder.map[A]

  implicit def canBuildFromConfDecoder[C[_], A](
      implicit ev: ConfDecoder[A],
      factory: Factory[A, C[A]],
      classTag: ClassTag[A]
  ): ConfDecoder[C[A]] =
    CanBuildFromDecoder.list[C, A]

  def orElse[A](a: ConfDecoder[A], b: ConfDecoder[A]): ConfDecoder[A] =
    new ConfDecoder[A] {
      override def read(conf: Conf): Configured[A] =
        a.read(conf) match {
          case ok @ Configured.Ok(_) => ok
          case Configured.NotOk(notOk) =>
            b.read(conf) match {
              case ok2 @ Configured.Ok(_) => ok2
              case Configured.NotOk(notOk2) =>
                notOk.combine(notOk2).notOk
            }
        }
    }
}
olafurpg/metaconfig
metaconfig-core/shared/src/main/scala/metaconfig/ConfDecoder.scala
Scala
apache-2.0
4,712
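As a brief usage sketch of the metaconfig ConfDecoder record above (hedged: the example values are invented, but Conf.Num, Conf.Str, and the implicit Int decoder all appear in the file as shown):

// Hypothetical usage of the intConfDecoder defined in the file above; values are illustrative.
val fromNum = ConfDecoder[Int].read(Conf.Num(42))      // Configured.Ok(42)
val fromStr = ConfDecoder[Int].read(Conf.Str("42"))    // Configured.Ok(42), via the Number extractor
val failed  = ConfDecoder[Int].read(Conf.Str("forty")) // Configured.NotOk(...) with a "Number" type mismatch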
/**
 * An example MapReduce application using Scala and HBase.
 *
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2014 Jeremy Fisher
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * @author Jeremy Fisher <[email protected]>
 */
package com.rentawebgeek.hbmr

import java.util.StringTokenizer

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase._
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.io._
import org.apache.hadoop.hbase.mapreduce._
import org.apache.hadoop.io._
import org.apache.hadoop.mapreduce._

import scala.collection.JavaConversions._

class WordCountMapper extends TableMapper[Text, LongWritable] {

  type Contxt = Mapper[ImmutableBytesWritable, Result, Text, LongWritable]#Context

  val word = new Text
  val one = new LongWritable(1)

  override def map(key: ImmutableBytesWritable, value: Result, context: Contxt) {
    val cell = value.getColumnLatestCell(Families.content, Qualifiers.text)
    val text = new String(CellUtil.cloneValue(cell)).toLowerCase

    val tokenizer = new StringTokenizer(text)
    while (tokenizer.hasMoreTokens) {
      // converting to scala Enumerable gives us foreach(Object => Unit), not foreach(String => Unit)
      val term = tokenizer.nextToken
      if (term.matches("[a-zA-Z0-9]+")) {
        word.set(term)
        context.write(word, one)
      }
    }
  }
}
rawg/scala-hbase-wordcount
src/main/scala/WordCountMapper.scala
Scala
mit
2,433
package org.embulk.parser.twitter_ads_stats

import spray.json.{pimpAny, JsObject, JsString}

class SegmentJsonSpec extends UnitSpec {

  "json write" in {
    import SegmentJson._
    val v: Segment = Map("segment_name" -> "hoge", "segment_value" -> "fuga")
    val actual = v.toJson
    val expected = JsObject(
      "segment_name" -> JsString("hoge"),
      "segment_value" -> JsString("fuga")
    )
    assert(actual == expected)
  }
}
septeni-original/embulk-parser-twitter_ads_stats
src/test/scala/org/embulk/parser/twitter_ads_stats/SegmentJsonSpec.scala
Scala
mit
444
package api

import javax.inject.Inject

import com.payu.shorturl.model.Error
import com.payu.shorturl.util.Logging
import play.api.i18n._
import play.api.libs.json._
import play.api.mvc._

import scala.concurrent.{ExecutionContext, Future}

class Api extends Controller with JsonSupport with I18nSupport with ImplicityHelpers with Logging {

  @Inject
  var messagesApi: MessagesApi = _

  protected val Action = play.api.mvc.Action

}

trait ImplicityHelpers {
  @Inject
  implicit protected var execution: ExecutionContext = _
}

trait JsonSupport extends ImplicityHelpers {
  self: Controller with I18nSupport =>

  implicit class ResultAsJson(status: Status)(implicit requestHeader: RequestHeader) {

    def asJson[T: Writes](o: T): Result = o match {
      case true => Ok
      case false | None => NotFound
      case () => status
      case _ => status(Json.toJson(o))
    }

    def asJson[T: Writes](f: Future[T]): Future[Result] = {
      f.map(r => asJson(r))
    }
  }

  object json {

    private val InvalidCode = "input.invalid"

    def apply[T: Reads]: BodyParser[T] = {
      BodyParser("json input") { implicit request =>
        parse.json(request).mapFuture {
          case Left(simpleResult) =>
            Future.successful(Left(simpleResult))
          case Right(jsValue) =>
            jsValue.validate match {
              case JsSuccess(v, _) =>
                Future.successful(Right(v))
              case e: JsError =>
                val errors = JsError.toFlatForm(e).flatMap {
                  case (code, error) => error.map(e => Error(code, e.message))
                }
                ErrorRequest[T](errors.toSet)
            }
        }
      }
    }

    private def ErrorRequest[T](errors: Set[Error]): Future[Either[Result, T]] = {
      val error = Error(InvalidCode, Messages(InvalidCode), Some(errors))
      val result = BadRequest(Json.toJson(error))
      Future.successful(Left(result))
    }
  }
}
felipehaack/shorturl
payu-api/app/api/Api.scala
Scala
gpl-3.0
1,939
package com.featurefm.riversong.tracing

import akka.http.scaladsl.model.HttpHeader

case class ContextPropagation(
  headers: Seq[HttpHeader] = Seq()
)

object ContextPropagation {
  val dtHeadersSet = Set(
    "x-request-id",
    "x-b3-traceid",
    "x-b3-spanid",
    "x-b3-parentspanid",
    "x-b3-sampled",
    "x-b3-flags",
    "x-ot-span-context",
    "x-cloud-trace-context",
    "traceparent",
    "grpc-trace-bin"
  )
}
ListnPlay/RiverSong
src/main/scala/com/featurefm/riversong/tracing/ContextPropagation.scala
Scala
mit
393
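The dtHeadersSet in the ContextPropagation record above lists the distributed-tracing headers worth forwarding. A hedged sketch of how it might be applied (the helper name is hypothetical; only HttpHeader.lowercaseName from akka-http is assumed beyond what the file shows):

// Hypothetical helper: keep only the tracing headers from an incoming request.
def tracingContext(incoming: Seq[HttpHeader]): ContextPropagation =
  ContextPropagation(incoming.filter(h => ContextPropagation.dtHeadersSet.contains(h.lowercaseName)))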
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ly.stealth.mesos.kafka

import org.apache.mesos.{ExecutorDriver, MesosExecutorDriver}
import org.apache.mesos.Protos._
import java.io._
import java.util
import scala.collection.immutable.HashMap
import org.apache.log4j._

object Executor extends org.apache.mesos.Executor {
  val logger: Logger = Logger.getLogger(Executor.getClass)

  @volatile var server: KafkaServer = null

  def registered(driver: ExecutorDriver, executor: ExecutorInfo, framework: FrameworkInfo, slave: SlaveInfo): Unit = {
    logger.info("[registered] framework:" + MesosStr.framework(framework) + " slave:" + MesosStr.slave(slave))
  }

  def reregistered(driver: ExecutorDriver, slave: SlaveInfo): Unit = {
    logger.info("[reregistered] " + MesosStr.slave(slave))
  }

  def disconnected(driver: ExecutorDriver): Unit = {
    logger.info("[disconnected]")
  }

  def launchTask(driver: ExecutorDriver, task: TaskInfo): Unit = {
    logger.info("[launchTask] " + MesosStr.task(task))

    new Thread {
      override def run() {
        setName("KafkaServer")
        runKafkaServer(driver, task)
      }
    }.start()
  }

  def killTask(driver: ExecutorDriver, id: TaskID): Unit = {
    logger.info("[killTask] " + id.getValue)
    if (server != null) server.stop()
  }

  def frameworkMessage(driver: ExecutorDriver, data: Array[Byte]): Unit = {
    logger.info("[frameworkMessage] " + new String(data))
  }

  def shutdown(driver: ExecutorDriver): Unit = {
    logger.info("[shutdown]")
    if (server != null) server.stop()
  }

  def error(driver: ExecutorDriver, message: String): Unit = {
    logger.info("[error] " + message)
  }

  private def runKafkaServer(driver: ExecutorDriver, task: TaskInfo): Unit = {
    try {
      server = new KafkaServer(task.getTaskId.getValue, props(task))
      server.start()

      var status = TaskStatus.newBuilder.setTaskId(task.getTaskId).setState(TaskState.TASK_RUNNING).build
      driver.sendStatusUpdate(status)

      server.waitFor()

      status = TaskStatus.newBuilder.setTaskId(task.getTaskId).setState(TaskState.TASK_FINISHED).build
      driver.sendStatusUpdate(status)
    } catch {
      case t: Throwable =>
        logger.warn("", t)
        sendTaskFailed(driver, task, t)
    } finally {
      if (server != null) {
        server.stop()
        server = null
      }
    }
  }

  private def sendTaskFailed(driver: ExecutorDriver, task: TaskInfo, t: Throwable) {
    val stackTrace = new StringWriter()
    t.printStackTrace(new PrintWriter(stackTrace, true))

    driver.sendStatusUpdate(TaskStatus.newBuilder
      .setTaskId(task.getTaskId).setState(TaskState.TASK_FAILED)
      .setMessage("" + stackTrace)
      .build
    )
  }

  private def props(taskInfo: TaskInfo): Map[String, String] = {
    val buffer = new StringReader(taskInfo.getData.toStringUtf8)
    val p: util.Properties = new util.Properties()
    p.load(buffer)

    import scala.collection.JavaConversions._
    var props = new HashMap[String, String]()
    for (k <- p.keySet()) props += ("" + k -> p.getProperty("" + k))

    props
  }

  def main(args: Array[String]) {
    configureLogging()

    val driver = new MesosExecutorDriver(Executor)
    val status = if (driver.run eq Status.DRIVER_STOPPED) 0 else 1

    System.exit(status)
  }

  private def configureLogging() {
    System.setProperty("log4j.ignoreTCL", "true") // fix log4j class loading issue
    BasicConfigurator.resetConfiguration()

    val root = Logger.getRootLogger
    root.setLevel(Level.INFO)

    val logger = Logger.getLogger(Executor.getClass.getPackage.getName)
    logger.setLevel(if (System.getProperty("debug") != null) Level.DEBUG else Level.INFO)

    val layout = new PatternLayout("%d [%t] %-5p %c %x - %m%n")
    root.addAppender(new ConsoleAppender(layout))
  }
}
asteris-llc/kafka
src/scala/ly/stealth/mesos/kafka/Executor.scala
Scala
apache-2.0
4,595
package uk.gov.gds.ier.transaction.crown.previousAddress import uk.gov.gds.ier.model.MovedHouseOption import uk.gov.gds.ier.transaction.crown.InprogressCrown import uk.gov.gds.ier.step.StepTemplate trait PreviousAddressFirstMustache extends StepTemplate[InprogressCrown] { val title = "Have you changed your UK address in the last 12 months?" case class PreviousAddressFirstModel( question: Question, previousYes: Field, previousNo: Field ) extends MustacheData val mustache = MustacheTemplate("crown/previousAddressFirst") { (form, post) => implicit val progressForm = form PreviousAddressFirstModel( question = Question( postUrl = post.url, title = title, errorMessages = form.globalErrors.map { _.message }), previousYes = RadioField( key = keys.previousAddress.movedRecently, value = MovedHouseOption.Yes.name), previousNo = RadioField( key = keys.previousAddress.movedRecently, value = MovedHouseOption.NotMoved.name) ) } }
michaeldfallen/ier-frontend
app/uk/gov/gds/ier/transaction/crown/previousAddress/PreviousAddressFirstMustache.scala
Scala
mit
1,045
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.graphx.util import scala.annotation.tailrec import scala.collection.mutable import scala.reflect.ClassTag import scala.util._ import org.apache.spark._ import org.apache.spark.graphx._ import org.apache.spark.internal.Logging import org.apache.spark.rdd.RDD /** A collection of graph generating functions. */ object GraphGenerators extends Logging { val RMATa = 0.45 val RMATb = 0.15 val RMATd = 0.25 /** * Generate a graph whose vertex out degree distribution is log normal. * * The default values for mu and sigma are taken from the Pregel paper: * * Grzegorz Malewicz, Matthew H. Austern, Aart J.C Bik, James C. Dehnert, * Ilan Horn, Naty Leiser, and Grzegorz Czajkowski. 2010. * Pregel: a system for large-scale graph processing. SIGMOD '10. * * If the seed is -1 (default), a random seed is chosen. Otherwise, use * the user-specified seed. * * @param sc Spark Context * @param numVertices number of vertices in generated graph * @param numEParts (optional) number of partitions * @param mu (optional, default: 4.0) mean of out-degree distribution * @param sigma (optional, default: 1.3) standard deviation of out-degree distribution * @param seed (optional, default: -1) seed for RNGs, -1 causes a random seed to be chosen * @return Graph object */ def logNormalGraph( sc: SparkContext, numVertices: Int, numEParts: Int = 0, mu: Double = 4.0, sigma: Double = 1.3, seed: Long = -1): Graph[Long, Int] = { val evalNumEParts = if (numEParts == 0) sc.defaultParallelism else numEParts // Enable deterministic seeding val seedRand = if (seed == -1) new Random() else new Random(seed) val seed1 = seedRand.nextInt() val seed2 = seedRand.nextInt() val vertices: RDD[(VertexId, Long)] = sc.parallelize(0 until numVertices, evalNumEParts).map { src => (src, sampleLogNormal(mu, sigma, numVertices, seed = (seed1 ^ src))) } val edges = vertices.flatMap { case (src, degree) => generateRandomEdges(src.toInt, degree.toInt, numVertices, seed = (seed2 ^ src)) } Graph(vertices, edges, 0) } // Right now it just generates a bunch of edges where // the edge data is the weight (default 1) val RMATc = 0.15 def generateRandomEdges( src: Int, numEdges: Int, maxVertexId: Int, seed: Long = -1): Array[Edge[Int]] = { val rand = if (seed == -1) new Random() else new Random(seed) Array.fill(numEdges) { Edge[Int](src, rand.nextInt(maxVertexId), 1) } } /** * Randomly samples from the given mean and standard deviation of the normal distribution. * It uses the formula `X = exp(mu+sigma*Z)` where `mu`, * `sigma` are the mean, standard deviation of the normal distribution and * `Z ~ N(0, 1)`. 
* * @param mu the mean of the normal distribution * @param sigma the standard deviation of the normal distribution * @param maxVal exclusive upper bound on the value of the sample * @param seed optional seed */ private[spark] def sampleLogNormal( mu: Double, sigma: Double, maxVal: Int, seed: Long = -1): Int = { val rand = if (seed == -1) new Random() else new Random(seed) // Z ~ N(0, 1) var X: Double = maxVal while (X >= maxVal) { val Z = rand.nextGaussian() X = math.exp(mu + sigma*Z) } math.floor(X).toInt } /** * A random graph generator using the R-MAT model, proposed in * "R-MAT: A Recursive Model for Graph Mining" by Chakrabarti et al. * * See http://www.cs.cmu.edu/~christos/PUBLICATIONS/siam04.pdf. */ def rmatGraph(sc: SparkContext, requestedNumVertices: Int, numEdges: Int): Graph[Int, Int] = { // let N = requestedNumVertices // the number of vertices is 2^n where n=ceil(log2[N]) // This ensures that the 4 quadrants are the same size at all recursion levels val numVertices = math.round( math.pow(2.0, math.ceil(math.log(requestedNumVertices) / math.log(2.0)))).toInt val numEdgesUpperBound = math.pow(2.0, 2 * ((math.log(numVertices) / math.log(2.0)) - 1)).toInt if (numEdgesUpperBound < numEdges) { throw new IllegalArgumentException( s"numEdges must be <= $numEdgesUpperBound but was $numEdges") } val edges = mutable.Set.empty[Edge[Int]] while (edges.size < numEdges) { if (edges.size % 100 == 0) { logDebug(edges.size + " edges") } edges += addEdge(numVertices) } outDegreeFromEdges(sc.parallelize(edges.toList)) } private def outDegreeFromEdges[ED: ClassTag](edges: RDD[Edge[ED]]): Graph[Int, ED] = { val vertices = edges.flatMap { edge => List((edge.srcId, 1)) } .reduceByKey(_ + _) .map{ case (vid, degree) => (vid, degree) } Graph(vertices, edges, 0) } /** * @param numVertices Specifies the total number of vertices in the graph (used to get * the dimensions of the adjacency matrix */ private def addEdge(numVertices: Int): Edge[Int] = { // val (src, dst) = chooseCell(numVertices/2.0, numVertices/2.0, numVertices/2.0) val v = math.round(numVertices.toFloat/2.0).toInt val (src, dst) = chooseCell(v, v, v) Edge[Int](src, dst, 1) } /** * This method recursively subdivides the adjacency matrix into quadrants * until it picks a single cell. The naming conventions in this paper match * those of the R-MAT paper. There are a power of 2 number of nodes in the graph. * The adjacency matrix looks like: * <pre> * * dst -> * (x,y) *************** _ * | | | | * | a | b | | * src | | | | * | *************** | T * \\|/ | | | | * | c | d | | * | | | | * *************** - * </pre> * * where this represents the subquadrant of the adj matrix currently being * subdivided. (x,y) represent the upper left hand corner of the subquadrant, * and T represents the side length (guaranteed to be a power of 2). 
* * After choosing the next level subquadrant, we get the resulting sets * of parameters: * {{{ * quad = a, x'=x, y'=y, T'=T/2 * quad = b, x'=x+T/2, y'=y, T'=T/2 * quad = c, x'=x, y'=y+T/2, T'=T/2 * quad = d, x'=x+T/2, y'=y+T/2, T'=T/2 * }}} */ @tailrec private def chooseCell(x: Int, y: Int, t: Int): (Int, Int) = { if (t <= 1) { (x, y) } else { val newT = math.round(t.toFloat/2.0).toInt pickQuadrant(RMATa, RMATb, RMATc, RMATd) match { case 0 => chooseCell(x, y, newT) case 1 => chooseCell(x + newT, y, newT) case 2 => chooseCell(x, y + newT, newT) case 3 => chooseCell(x + newT, y + newT, newT) } } } private def pickQuadrant(a: Double, b: Double, c: Double, d: Double): Int = { if (a + b + c + d != 1.0) { throw new IllegalArgumentException("R-MAT probability parameters sum to " + (a + b + c + d) + ", should sum to 1.0") } val rand = new Random() val result = rand.nextDouble() result match { case x if x < a => 0 // 0 corresponds to quadrant a case x if (x >= a && x < a + b) => 1 // 1 corresponds to b case x if (x >= a + b && x < a + b + c) => 2 // 2 corresponds to c case _ => 3 // 3 corresponds to d } } /** * Create `rows` by `cols` grid graph with each vertex connected to its * row+1 and col+1 neighbors. Vertex ids are assigned in row major * order. * * @param sc the spark context in which to construct the graph * @param rows the number of rows * @param cols the number of columns * * @return A graph containing vertices with the row and column ids * as their attributes and edge values as 1.0. */ def gridGraph(sc: SparkContext, rows: Int, cols: Int): Graph[(Int, Int), Double] = { // Convert row column address into vertex ids (row major order) def sub2ind(r: Int, c: Int): VertexId = r * cols + c val vertices: RDD[(VertexId, (Int, Int))] = sc.parallelize(0 until rows).flatMap { r => (0 until cols).map( c => (sub2ind(r, c), (r, c)) ) } val edges: RDD[Edge[Double]] = vertices.flatMap{ case (vid, (r, c)) => (if (r + 1 < rows) { Seq( (sub2ind(r, c), sub2ind(r + 1, c))) } else { Seq.empty }) ++ (if (c + 1 < cols) { Seq( (sub2ind(r, c), sub2ind(r, c + 1))) } else { Seq.empty }) }.map{ case (src, dst) => Edge(src, dst, 1.0) } Graph(vertices, edges) } // end of gridGraph /** * Create a star graph with vertex 0 being the center. * * @param sc the spark context in which to construct the graph * @param nverts the number of vertices in the star * * @return A star graph containing `nverts` vertices with vertex 0 * being the center vertex. */ def starGraph(sc: SparkContext, nverts: Int): Graph[Int, Int] = { val edges: RDD[(VertexId, VertexId)] = sc.parallelize(1 until nverts).map(vid => (vid, 0)) Graph.fromEdgeTuples(edges, 1) } // end of starGraph } // end of Graph Generators
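// Hedged usage sketch (editor addition, not part of the Spark source above): builds a small
// log-normal graph and an R-MAT graph in local mode and prints basic counts. The master
// "local[2]" and the sizes below are illustrative choices, not Spark defaults.
object GraphGeneratorsSketch {
  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.graphx.util.GraphGenerators

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("graph-gen-sketch"))
    try {
      // Out-degrees follow exp(mu + sigma * Z); the seed is fixed so the run is repeatable.
      val logNormal = GraphGenerators.logNormalGraph(sc, numVertices = 100, mu = 4.0, sigma = 1.3, seed = 42L)
      println(s"log-normal graph: ${logNormal.vertices.count()} vertices, ${logNormal.edges.count()} edges")

      // rmatGraph rounds the vertex count up to the next power of two internally.
      val rmat = GraphGenerators.rmatGraph(sc, requestedNumVertices = 64, numEdges = 200)
      println(s"R-MAT graph: ${rmat.vertices.count()} vertices, ${rmat.edges.count()} edges")
    } finally {
      sc.stop()
    }
  }
}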
ueshin/apache-spark
graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
Scala
apache-2.0
9,857
package molt.tokenize // We find all of the terminal symbols and make sure we split on // them, then assume everything in between is contiguous. // We are restricting atoms from containing any of our terminal // symbols. class BasicTokenizer(tokens: Set[String]) extends Tokenizer { private def getOverlaps(toks: List[String]): List[(String, String)] = toks match { case Nil => Nil case head :: tail => { val prefixes = head.scanLeft("")(_ + _).filterNot(_.isEmpty) val suffixes = head.scanRight("")(_ + _).filterNot(_.isEmpty) val culprits = tail.filter(_.contains(head)) ++ tail.filter(tok => prefixes.exists(prefix => tok.endsWith(prefix))) ++ tail.filter(tok => suffixes.exists(suffix => tok.startsWith(suffix))) culprits.map(x => (head, x)) ++ getOverlaps(tail) } } val overlaps = getOverlaps(tokens.toList).toSet val warning = if(!overlaps.isEmpty) { val str = s"Warning: tokens have overlap: $overlaps" Console.err.println(str) str } else "No token overlap detected :)" // tokenization as described above override def tokenizations(s: String): Set[Seq[String]] = { // split on spaces. this is a reversible decision. val unTokenizedStringVector = s.split("\\\\s+").toList // to turn a single string into a list with the specified terminal split out def splitSingleString(str: String, tok: String): List[String] = { if (str.isEmpty) Nil else if (!str.contains(tok) || str.equals(tok)) List(str) else { val (head, tail) = str.splitAt(str.indexOf(tok)) val remainder = tok :: splitSingleString(tail.substring(tok.length), tok) if (head.isEmpty) remainder else head :: remainder } } // does the above function over a list of strings to get a new list with all of the tokens def splitOutToken(strs: List[String], tok: String): List[String] = { strs.flatMap(splitSingleString(_, tok)) } // we do the splitting for every terminal and get our final token list Set(tokens.foldLeft(unTokenizedStringVector)(splitOutToken)) } }
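// Hedged usage sketch (editor addition): shows the whitespace-plus-terminal splitting done by
// BasicTokenizer above. The terminal set below is an illustrative assumption.
object BasicTokenizerSketch {
  import molt.tokenize.BasicTokenizer

  def main(args: Array[String]): Unit = {
    val tokenizer = new BasicTokenizer(Set("(", ")", "+"))
    // "f(x+1)" is first split on whitespace, then each terminal symbol is split out,
    // yielding a single tokenization: Seq("f", "(", "x", "+", "1", ")").
    println(tokenizer.tokenizations("f(x+1)"))
    // Overlapping terminals (for example "+" and "++") would have been reported via `warning`.
  }
}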
julianmichael/molt
molt/shared/src/main/scala/molt/tokenize/BasicTokenizer.scala
Scala
mit
2,160
/* * Copyright (c) 2015 Snowplow Analytics Ltd. * All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache * License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. * * See the Apache License Version 2.0 for the specific language * governing permissions and limitations there under. */ package com.snowplowanalytics.snowplow.bigquery.loader //TODO - Change "STRING", "INT", etc to algebraic data types object SnowplowEnrichedEventSchema { val fields = Array( ("app_id", "STRING"), ("platform", "STRING"), ("etl_tstamp", "TIMESTAMP"), ("collector_tstamp", "TIMESTAMP"), ("dvce_tstamp", "TIMESTAMP"), ("event", "STRING"), ("event_id", "STRING"), ("txn_id", "INTEGER"), ("name_tracker", "STRING"), ("v_tracker", "STRING"), ("v_collector", "STRING"), ("v_etl", "STRING"), ("user_id", "STRING"), ("user_ipaddress", "STRING"), ("user_fingerprint", "STRING"), ("domain_userid", "STRING"), ("domain_sessionidx","INTEGER"), ("network_userid", "STRING"), ("geo_country", "STRING"), ("geo_region", "STRING"), ("geo_city", "STRING"), ("geo_zipcode", "STRING"), ("geo_latitude", "FLOAT"), ("geo_longitude", "FLOAT"), ("geo_region_name", "STRING"), ("ip_isp", "STRING"), ("ip_organization", "STRING"), ("ip_domain", "STRING"), ("ip_netspeed", "STRING"), ("page_url", "STRING"), ("page_title", "STRING"), ("page_referrer", "STRING"), ("page_urlscheme", "STRING"), ("page_urlhost", "STRING"), ("page_urlport","INTEGER"), ("page_urlpath", "STRING"), ("page_urlquery", "STRING"), ("page_urlfragment", "STRING"), ("refr_urlscheme", "STRING"), ("refr_urlhost", "STRING"), ("refr_urlport","INTEGER"), ("refr_urlpath", "STRING"), ("refr_urlquery", "STRING"), ("refr_urlfragment", "STRING"), ("refr_medium", "STRING"), ("refr_source", "STRING"), ("refr_term", "STRING"), ("mkt_medium", "STRING"), ("mkt_source", "STRING"), ("mkt_term", "STRING"), ("mkt_content", "STRING"), ("mkt_campaign", "STRING"), ("contexts", "STRING"), ("se_category", "STRING"), ("se_action", "STRING"), ("se_label", "STRING"), ("se_property", "STRING"), ("se_value", "FLOAT"), ("unstruct_event", "STRING"), ("tr_orderid", "STRING"), ("tr_affiliation", "STRING"), ("tr_total", "FLOAT"), ("tr_tax", "FLOAT"), ("tr_shipping", "FLOAT"), ("tr_city", "STRING"), ("tr_state", "STRING"), ("tr_country", "STRING"), ("ti_orderid", "STRING"), ("ti_sku", "STRING"), ("ti_name", "STRING"), ("ti_category", "STRING"), ("ti_price", "FLOAT"), ("ti_quantity", "INTEGER"), ("pp_xoffset_min", "INTEGER"), ("pp_xoffset_max", "INTEGER"), ("pp_yoffset_min", "INTEGER"), ("pp_yoffset_max", "INTEGER"), ("useragent", "STRING"), ("br_name", "STRING"), ("br_family", "STRING"), ("br_version", "STRING"), ("br_type", "STRING"), ("br_renderengine", "STRING"), ("br_lang", "STRING"), ("br_features_pdf", "BOOLEAN"), ("br_features_flash", "BOOLEAN"), ("br_features_java", "BOOLEAN"), ("br_features_director", "BOOLEAN"), ("br_features_quicktime", "BOOLEAN"), ("br_features_realplayer", "BOOLEAN"), ("br_features_windowsmedia", "BOOLEAN"), ("br_features_gears", "BOOLEAN"), ("br_features_silverlight", "BOOLEAN"), ("br_cookies", "BOOLEAN"), ("br_colordepth", "STRING"), ("br_viewwidth", "INTEGER"), ("br_viewheight", 
"INTEGER"), ("os_name", "STRING"), ("os_family", "STRING"), ("os_manufacturer", "STRING"), ("os_timezone", "STRING"), ("dvce_type", "STRING"), ("dvce_ismobile", "BOOLEAN"), ("dvce_screenwidth", "INTEGER"), ("dvce_screenheight", "INTEGER"), ("doc_charset", "STRING"), ("doc_width", "INTEGER"), ("doc_height", "INTEGER") ) def names: Array[String] = { fields.map(_._1) } def types: Array[String] = { fields.map(_._2) } }
snowplow/bigquery-loader-cli
src/main/scala/com.snowplowanalytics.snowplow.storage/BasicSchema.scala
Scala
apache-2.0
4,461
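// Hedged sketch (editor addition) of the refactor suggested by the TODO in the
// SnowplowEnrichedEventSchema file above: model the column types as an ADT instead of raw
// strings. The names and the `fromTag` mapping are assumptions, not part of the original loader.
object FieldTypeSketch {
  sealed trait FieldType
  case object StringType extends FieldType
  case object IntegerType extends FieldType
  case object FloatType extends FieldType
  case object BooleanType extends FieldType
  case object TimestampType extends FieldType

  def fromTag(tag: String): FieldType = tag match {
    case "STRING"    => StringType
    case "INTEGER"   => IntegerType
    case "FLOAT"     => FloatType
    case "BOOLEAN"   => BooleanType
    case "TIMESTAMP" => TimestampType
    case other       => sys.error(s"Unknown field type tag: $other")
  }

  // Example: a typed view of the schema defined above.
  // SnowplowEnrichedEventSchema.fields.map { case (name, tag) => name -> fromTag(tag) }
}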
package me.axiometry.blocknet.entity trait CaveSpider extends Spider
Axiometry/Blocknet
blocknet-api/src/main/scala/me/axiometry/blocknet/entity/CaveSpider.scala
Scala
bsd-2-clause
69
package com.socrata.datacoordinator.resources import com.socrata.datacoordinator.id.{DatasetId, DatasetInternalName} import com.socrata.datacoordinator.service.ServiceConfig import com.socrata.http.server._ import com.socrata.http.server.responses._ import com.socrata.http.server.implicits._ case class DatasetSecondaryStatusResource(storeIdOpt: Option[String], datasetId: DatasetId, secondaries: Set[String], secondariesNotAcceptingNewDatasets: Set[String], versionInStore: (String, DatasetId) => Option[Long], serviceConfig: ServiceConfig, ensureInSecondary: (String, DatasetId) => Boolean, ensureInSecondaryGroup: (String, DatasetId, Option[DatasetInternalName]) => Boolean, deleteFromSecondary: (String, DatasetId) => Boolean, formatDatasetId: DatasetId => String) extends ErrorHandlingSodaResource(formatDatasetId) { override val log = org.slf4j.LoggerFactory.getLogger(classOf[DatasetSecondaryStatusResource]) override def get = storeIdOpt match { case Some(storeId: String) => r:HttpRequest => doGetDataVersionInSecondary(r)(storeId) case None => _: HttpRequest => NotFound } override def post = storeIdOpt match { case Some(storeId: String) => r:HttpRequest => doUpdateVersionInSecondary(r, storeId) case None => _: HttpRequest => NotFound } override def delete = storeIdOpt match { case Some(storeId: String) => r:HttpRequest => doDeleteInSecondary(r, storeId) case None => _: HttpRequest => NotFound } private def doGetDataVersionInSecondary(req: HttpRequest)(storeId: String): HttpResponse = { if(!secondaries(storeId)) return NotFound versionInStore(storeId, datasetId) match { case Some(v) => OK ~> Json(Map("version" -> v)) case None => NotFound } } private def doUpdateVersionInSecondary(req: HttpRequest, storeId: String): HttpResponse = { val defaultSecondaryGroups: Set[String] = serviceConfig.secondary.defaultGroups val groupRe = "_(.*)_".r val secondariesLike = req.queryParameter("secondaries_like").flatMap(DatasetInternalName(_)) val found = storeId match { case "_DEFAULT_" => defaultSecondaryGroups.toVector.map(ensureInSecondaryGroup(_, datasetId, secondariesLike)).forall(identity) // no side effects in forall case groupRe(g) if serviceConfig.secondary.groups.contains(g) => ensureInSecondaryGroup(g, datasetId, secondariesLike) case _ if secondariesNotAcceptingNewDatasets(storeId) => return Forbidden case secondary if secondaries(storeId) => ensureInSecondary(secondary, datasetId) case _ => false } if (found) OK else NotFound } private def doDeleteInSecondary(req: HttpRequest, storeId: String): HttpResponse = storeId match { case secondary if secondaries(storeId) => if (deleteFromSecondary(secondary, datasetId)) OK else NotFound case _ => NotFound } }
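// Hedged sketch (editor addition): the storeId dispatch performed by doUpdateVersionInSecondary
// above, extracted as a pure function so the possible outcomes are easy to see. The group and
// secondary names in the examples are illustrative assumptions.
object StoreIdRoutingSketch {
  sealed trait Route
  case object DefaultGroups extends Route                  // "_DEFAULT_": every default group
  final case class SingleGroup(name: String) extends Route
  final case class SingleSecondary(id: String) extends Route
  case object NotAccepting extends Route                   // maps to 403 Forbidden above
  case object Unknown extends Route                        // maps to 404 Not Found above

  private val groupRe = "_(.*)_".r

  def route(storeId: String,
            knownGroups: Set[String],
            secondaries: Set[String],
            notAcceptingNewDatasets: Set[String]): Route = storeId match {
    case "_DEFAULT_" => DefaultGroups
    case groupRe(g) if knownGroups.contains(g) => SingleGroup(g)
    case s if notAcceptingNewDatasets(s) => NotAccepting
    case s if secondaries(s) => SingleSecondary(s)
    case _ => Unknown
  }

  // route("_DEFAULT_", Set("read"), Set("pg1"), Set.empty) == DefaultGroups
  // route("_read_",    Set("read"), Set("pg1"), Set.empty) == SingleGroup("read")
  // route("pg1",       Set("read"), Set("pg1"), Set.empty) == SingleSecondary("pg1")
}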
socrata-platform/data-coordinator
coordinator/src/main/scala/com/socrata/datacoordinator/resources/DatasetSecondaryStatusResource.scala
Scala
apache-2.0
3,255
package org.jetbrains.plugins.scala.lang.dfa.invocationInfo.tests import org.jetbrains.plugins.scala.lang.dfa.invocationInfo.InvocationInfoTestBase import org.jetbrains.plugins.scala.lang.dfa.invocationInfo.arguments.Argument.{PassByName, PassByValue} class MultipleParameterListsInfoTest extends InvocationInfoTestBase { def testBasicCallsWithMultipleArgumentLists(): Unit = { val sugaredSyntax = "manyParamLists(4, 99)(15)(4, 9, true) { \"Hi\" }" val desugaredSyntax = "manyParamLists(4, 99)(15)(4, 9, true)(\"Hi\")" val code = (invocationSyntax: String) => s""" |object SomeObject { | def manyParamLists(a: Int, b: => Int)(c: Int)(d: Int, e: => Int, f: Boolean)(g: String): Int = 55 | | def main(): Int = { | ${markerStart}${invocationSyntax}${markerEnd} | } |} |""".stripMargin for ((invocationSyntax, lastArgText) <- List((sugaredSyntax, "{ \"Hi\" }"), (desugaredSyntax, "\"Hi\""))) { val invocationInfo = generateInvocationInfoFor(code(invocationSyntax)) val expectedArgCount = List(1 + 2, 1, 3, 1) val expectedProperArgsInText = List(List("4", "99"), List("15"), List("4", "9", "true"), List(lastArgText)) val expectedMappedParamNames = List(List("a", "b"), List("c"), List("d", "e", "f"), List("g")) val expectedPassingMechanisms = List(List(PassByValue, PassByValue, PassByName), List(PassByValue), List(PassByValue, PassByName, PassByValue), List(PassByValue)) val expectedParamToArgMapping = (0 until expectedArgCount.sum - 1).toList verifyInvokedElement(invocationInfo, "SomeObject#manyParamLists") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping) } } def testMultipleParameterListsWithNamedParameters(): Unit = { val invocationInfo = generateInvocationInfoFor { s""" |object SomeObject { | def manyParamLists(a: Int, b: => Int)(c: Int)(d: Int, e: => Int, f: Boolean)(g: Int = 100): Int = 55 | | def main(): Int = { | ${markerStart}manyParamLists(b = 99, a = 4)(15)(4, f = true, e = 9)(g = 100)${markerEnd} | } |} |""".stripMargin } val expectedArgCount = List(1 + 2, 1, 3, 1) val expectedProperArgsInText = List(List("99", "4"), List("15"), List("4", "true", "9"), List("100")) val expectedMappedParamNames = List(List("b", "a"), List("c"), List("d", "f", "e"), List("g")) val expectedPassingMechanisms = List(List(PassByValue, PassByName, PassByValue), List(PassByValue), List(PassByValue, PassByValue, PassByName), List(PassByValue)) val expectedParamToArgMapping = List(1, 0, 2, 3, 5, 4, 6) verifyInvokedElement(invocationInfo, "SomeObject#manyParamLists") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping) } def testMultipleArgumentListsWithDefaultParameters(): Unit = { val sugaredSyntax = "manyParamLists(4, 99)(15)(4)()" val desugaredSyntax = "manyParamLists(4, 99)(15)(4, 9, true)(100)" val code = (invocationSyntax: String) => s""" |object SomeObject { | def manyParamLists(a: Int, b: => Int)(c: Int)(d: Int, e: => Int = 9, f: Boolean = true)(g: Int = 100): Int = 55 | | def main(): Int = { | ${markerStart}${invocationSyntax}${markerEnd} | } |} |""".stripMargin for (invocationSyntax <- List(sugaredSyntax, desugaredSyntax)) { val invocationInfo = generateInvocationInfoFor(code(invocationSyntax)) val expectedArgCount = List(1 + 2, 1, 3, 1) val expectedProperArgsInText = List(List("4", "99"), List("15"), List("4", "9", "true"), List("100")) val expectedMappedParamNames = 
List(List("a", "b"), List("c"), List("d", "e", "f"), List("g")) val expectedPassingMechanisms = List(List(PassByValue, PassByValue, PassByName), List(PassByValue), List(PassByValue, PassByName, PassByValue), List(PassByValue)) val expectedParamToArgMapping = (0 until expectedArgCount.sum - 1).toList verifyInvokedElement(invocationInfo, "SomeObject#manyParamLists") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping) } } def testMultipleArgumentListsWithInfixOperators(): Unit = { val sugaredSyntax = "(obj1 ++++ obj2)(5, 9)(obj2.x, 333)" val desugaredSyntax = "obj1.++++(obj2)(5, 9)(obj2.x, 333)" val code = (invocationSyntax: String) => s""" |case class Something(x: Int) { | def ++++(other: Something)(a: Int, b: Int)(c: Int, d: Int): Int = a * c + b * d - x |} | | def main(): Int = { | val obj1 = Something(3) | val obj2 = Something(6) | ${markerStart}${invocationSyntax}${markerEnd} | } |} |""".stripMargin for (invocationSyntax <- List(sugaredSyntax, desugaredSyntax)) { val invocationInfo = generateInvocationInfoFor(code(invocationSyntax)) val expectedArgCount = List(1 + 1, 2, 2) val expectedProperArgsInText = List(List("obj2"), List("5", "9"), List("obj2.x", "333")) val expectedMappedParamNames = List(List("other"), List("a", "b"), List("c", "d")) val expectedPassingMechanisms = (1 to 3).map(_ => List(PassByValue, PassByValue)).toList val expectedParamToArgMapping = (0 until expectedArgCount.sum - 1).toList verifyInvokedElement(invocationInfo, "Something#++++") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping) verifyThisExpression(invocationInfo, "obj1") } } def testMultipleArgumentListsWithRightAssociativeInfixOperators(): Unit = { val sugaredSyntax = "(obj1 ++++: obj2)(5, 9)(obj2.x, 333)" val desugaredSyntax = "obj2.++++:(obj1)(5, 9)(obj2.x, 333)" val code = (invocationSyntax: String) => s""" |case class Something(x: Int) { | def ++++:(other: Something)(a: Int, b: Int)(c: Int, d: Int): Int = a * c + b * d - x |} | | def main(): Int = { | val obj1 = Something(3) | val obj2 = Something(6) | ${markerStart}${invocationSyntax}${markerEnd} | } |} |""".stripMargin for ((invocationSyntax, evaluationOrderReversed) <- List((sugaredSyntax, true), (desugaredSyntax, false))) { val invocationInfo = generateInvocationInfoFor(code(invocationSyntax)) val expectedArgCount = List(1 + 1, 2, 2) val expectedProperArgsInText = List(List("obj1"), List("5", "9"), List("obj2.x", "333")) val expectedMappedParamNames = List(List("other"), List("a", "b"), List("c", "d")) val expectedPassingMechanisms = (1 to 3).map(_ => List(PassByValue, PassByValue)).toList val expectedParamToArgMapping = (0 until expectedArgCount.sum - 1).toList verifyInvokedElement(invocationInfo, "Something#++++:") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping, isRightAssociative = evaluationOrderReversed) verifyThisExpression(invocationInfo, "obj2") } } def testMultipleParameterListsWithVarargs(): Unit = { val invocationInfo = generateInvocationInfoFor { s""" |object SomeObject { | def withVarargs(a: Int, b: => Int)(c: Int)(d: Int, e: => Boolean, f: Int*)(g: Int = 100): Int = 55 | | def main(): Int = { | val x = -2 | ${markerStart}withVarargs(b = 99, a = 4)(15)(4, true, 8, 
333, x, 23 * 10000)(100)${markerEnd} | } |} |""".stripMargin } val expectedArgCount = List(1 + 2, 1, 3, 1) val expectedProperArgsInText = List(List("99", "4"), List("15"), List("4", "true", "8 :: 333 :: x :: 23 * 10000 :: Nil: _*"), List("100")) val expectedMappedParamNames = List(List("b", "a"), List("c"), List("d", "e", "f"), List("g")) val expectedPassingMechanisms = List(List(PassByValue, PassByName, PassByValue), List(PassByValue), List(PassByValue, PassByName, PassByValue), List(PassByValue)) val expectedParamToArgMapping = List(1, 0, 2, 3, 4, 5, 6) verifyInvokedElement(invocationInfo, "SomeObject#withVarargs") verifyArgumentsWithMultipleArgLists(invocationInfo, expectedArgCount, expectedProperArgsInText, expectedMappedParamNames, expectedPassingMechanisms, expectedParamToArgMapping) } }
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/dfa/invocationInfo/tests/MultipleParameterListsInfoTest.scala
Scala
apache-2.0
8,983
package 练习16 object Runner { class Tag[T] def i[T](i1: Tag[T], i2: Tag[T]): List[Tag[T]] = List(i1, i2) class Item1 class Item2 class Item3 class Item4 class Item5 class Item6 class Item7 class Item8 class Item9 type 被减数1 = 被减数Positive[被减数Positive[被减数Zero, Item1], Item2] type 减数1 = 减数Positive[ 减数Positive[减数Positive[减数Positive[减数Positive[减数Positive[减数Positive[减数Zero, Item3], Item4], Item5], Item6], Item7], Item8], Item9 ] type 差1 = 减数Positive[减数Positive[减数Positive[减数Positive[减数Positive[减数Zero, Item3], Item4], Item5], Item6], Item7] i(new Tag[减数1#减[被减数1]], new Tag[差1]) type 被减数2 = 被减数Positive[被减数Positive[被减数Positive[被减数Positive[被减数Positive[被减数Positive[被减数Zero, Item1], Item2], Item3], Item4], Item5], Item6] type 减数2 = 减数Positive[减数Positive[减数Positive[减数Zero, Item7], Item8], Item9] type 差2 = 被减数Positive[被减数Positive[被减数Positive[被减数Zero, Item1], Item2], Item3] i(new Tag[减数2#减[被减数2]], new Tag[差2]) }
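// Editor note (added comments, hedged): the Chinese identifiers spell out the parts of a
// subtraction: 被减数 = minuend, 减数 = subtrahend, 差 = difference, 减 = subtract.
// Each *Positive wrapper prepends one Item to a type-level list, and i(i1: Tag[T], i2: Tag[T])
// only compiles when both Tag type arguments are the same type, so the two i(...) calls above
// act as compile-time assertions that the 减 type member computes the expected difference
// (7 items against 2 items gives 5, and 6 items against 3 items gives 3). The exact
// definitions of 被减数Positive / 减数Positive live elsewhere in this repository.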
djx314/ubw
a28-练习/src/main/scala/练习16/Runner.scala
Scala
bsd-3-clause
1,177
package scalapt import scalapt.core._ object GenerateScenes { def main(args : Array[String]) : Unit = { SceneIO.save(Cornell.scene, "scenes/cornell.json") SceneIO.save(Cornell2.scene, "scenes/cornell2.json") SceneIO.save(Horizon.scene, "scenes/horizon.json") SceneIO.save(RedGreenBlue.scene, "scenes/rgb.json") } } /** * Original smallpt Cornell box scene. */ object Cornell { val objects : List[Shape] = List( Plane("left", Material.diffuse(0.75, 0.25, 0.25), Axis.X, true, 1), Plane("right", Material.diffuse(0.25, 0.25, 0.75), Axis.X, false, 99), Plane("back", Material.diffuse(0.75, 0.75, 0.75), Axis.Z, true, 0), Plane("front", Material.diffuse(RGB.black), Axis.Z, false, 170), Plane("bottom", Material.diffuse(0.75, 0.75, 0.75), Axis.Y, true, 0), Plane("top", Material.diffuse(0.75, 0.75, 0.75), Axis.Y, false, 81.6), Sphere("mirror", Material.reflective(RGB.white * 0.999), Point3(27, 16.5, 47), 16.5), Sphere("glass", Material.refractive(RGB.white * 0.999), Point3(73, 16.5, 78), 16.5), Sphere("light", Material.emissive(RGB.white * 12), Point3(50, 681.6 - 0.27, 81.6), 600.0) ) val scene = Scene( Camera( Ray( Point3(50, 52, 295.6), Vector3(0, -0.042612, -1) ), 0.5135 ), objects) } object Cornell2 { val objects : List[Shape] = List( Plane("left", Material.diffuse(0.75, 0.25, 0.25), Axis.X, true, 1), Plane("right", Material.diffuse(0.25, 0.25, 0.75), Axis.X, false, 99), Plane("back", Material.diffuse(0.75, 0.75, 0.75), Axis.Z, true, 0), Plane("front", Material.diffuse(RGB.black), Axis.Z, false, 170), Plane("bottom", Material.diffuse(0.75, 0.75, 0.75), Axis.Y, true, 0), Plane("top", Material.diffuse(0.75, 0.75, 0.75), Axis.Y, false, 81.6), Sphere("mirror", Material.reflective(RGB.white * 0.999), Point3(27, 60, 47), 16.5), Sphere("glass", Material.refractive(RGB.white * 0.999), Point3(73, 16.5, 78), 16.5), Sphere("diff", Material.diffuse(0.75, 0.75, 0.25), Point3(27, 16.5, 100), 16.5), Sphere("light", Material.emissive(RGB.white * 12), Point3(50, 681.6 - 0.27, 81.6), 600.0) ) val scene = Scene( Camera( Ray( Point3(50, 52, 295.6), Vector3(0, -0.042612, -1) ), 0.5135 ), objects) } object Horizon { val W = 100.0 val W2 = W * 2.0 / 3.0 val D = W / 2.0 val R = W / 4.0 val R2 = W * 16.0 val centre = Point3(0.0, R, 0.0) val sky = RGB(135.0/256, 206.0/256, 250.0/256) val objects : List[Shape] = List( Plane("ground", Material.diffuse(RGB.white * 0.999), Axis.Y, true, 0.0), Sphere("refl", Material.reflective(RGB.white * 0.999), Point3(0.0, W2, -W2), W2), Sphere("lglass", Material.refractive(0.75, 0.25, 0.25), Point3(-W2, R, W), R), Sphere("mglass", Material.refractive(0.25, 0.75, 0.25), Point3(0, R, W * (3.0 / 2.0)), R), Sphere("rglass", Material.refractive(0.25, 0.25, 0.75), Point3(W2, R, W), R), Sphere("light", Material.emissive(RGB.white * 12.0), centre + Vector3.YUnit * W * 2, R), Sphere("sky", Material.diffuse(sky), Point3(0.0, W * 3 + R2, 0.0), R2) ) val cam = Point3(0.0, W * 1.5, 4 * W) val lookAt = Point3(0.0, R, 0.0) val scene = Scene( Camera( Ray( cam, lookAt - cam ), 0.7 ), objects) } object RedGreenBlue { val cos30 = Math.cos(math.Pi / 6.0) val sin30 = Math.sin(math.Pi / 6.0) val W = 100.0 val W2 = W * 2.0 / 3.0 val D = W / 2.0 val R = W / 4.0 val R2 = W * 16.0 val sky = RGB(135.0/256, 206.0/256, 250.0/256) val circR = W2 val pL = Point3(circR * -cos30, R, circR * sin30) val pM = Point3(0, R, -circR) val pR = Point3(circR * cos30, R, circR * sin30) val objects : List[Shape] = List( Plane("ground", Material.diffuse(RGB.white * 0.999), Axis.Y, true, 0.0), Sphere("lglass", Material.refractive(0.75, 
0.25, 0.25), pL, R), Sphere("mglass", Material.refractive(0.25, 0.75, 0.25), pM, R), Sphere("rglass", Material.refractive(0.25, 0.25, 0.75), pR, R), Sphere("llight", Material.emissive(RGB.white * 12.0), pL * 2.0, R / 2.0), Sphere("mlight", Material.emissive(RGB.white * 12.0), pM * 2.0, R / 2.0), Sphere("rlight", Material.emissive(RGB.white * 12.0), pR * 2.0, R / 2.0), //Sphere("light", Material.emissive(RGB.white * 12.0), Point3(0.0, W * 2, 0.0), R), Sphere("sky", Material.diffuse(sky), Point3(0.0, W * 3 + R2, 0.0), R2) ) val cam = Point3(0.0, W * 2.5, 1.5 * W) val lookAt = Point3(0.0, R, 0.0) val scene = Scene( Camera( Ray( cam, lookAt - cam ), 0.6 ), objects) }
jon-hanson/ScalaPT
core/src/test/scala/scalapt/Scenes.scala
Scala
mit
5,193
/* * Copyright (c) 2014 Contributor. All rights reserved. */ package scala.tools.nsc.classpath import scala.tools.nsc.util.ClassPath.RootPackage /** * Common methods related to package names represented as String */ object PackageNameUtils { /** * @param fullClassName full class name with package * @return (package, simple class name) */ def separatePkgAndClassNames(fullClassName: String): (String, String) = { val lastDotIndex = fullClassName.lastIndexOf('.') if (lastDotIndex == -1) (RootPackage, fullClassName) else (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1)) } def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." }
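// Hedged usage sketch (editor addition): the behaviour of separatePkgAndClassNames above.
object PackageNameUtilsSketch {
  import scala.tools.nsc.classpath.PackageNameUtils.separatePkgAndClassNames

  def main(args: Array[String]): Unit = {
    // Fully qualified name: split at the last dot.
    assert(separatePkgAndClassNames("scala.collection.immutable.List") ==
      ("scala.collection.immutable", "List"))
    // No dot at all: the whole string is the simple class name and the root-package
    // marker (ClassPath.RootPackage) is returned as the package part.
    println(separatePkgAndClassNames("Predef"))
  }
}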
felixmulder/scala
src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
Scala
bsd-3-clause
762
package korolev.effect trait Close[F[_], -T] { def onClose(that: T): F[Unit] def close(that: T): F[Unit] } object Close { def apply[F[_], T](implicit ev: Close[F, T]): Close[F, T] = implicitly[Close[F, T]] }
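// Hedged sketch (editor addition): one way a Close instance could be written and then summoned
// through the apply helper above. MyResource and the choice of F = Future are illustrative
// assumptions, and onClose is assumed to mean "a signal that completes once the resource closes".
object CloseSketch {
  import scala.concurrent.Future
  import korolev.effect.Close

  final class MyResource {
    def shutdown(): Unit = ()
  }

  implicit val myResourceClose: Close[Future, MyResource] =
    new Close[Future, MyResource] {
      def onClose(that: MyResource): Future[Unit] = Future.unit
      def close(that: MyResource): Future[Unit] = Future.successful(that.shutdown())
    }

  // Summoning via the companion's apply:
  val summoned: Close[Future, MyResource] = Close[Future, MyResource]
}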
fomkin/korolev
modules/effect/src/main/scala/korolev/effect/Close.scala
Scala
apache-2.0
220
package kafka.consumer import com.softwaremill.react.kafka.ConsumerProperties import kafka.serializer.DefaultDecoder import kafka.utils.Logging /** * Copied from https://github.com/stealthly/scala-kafka, 0.8.2-beta (not released at the moment) */ class KafkaConsumer[T](val props: ConsumerProperties[T]) extends Logging { val connector = Consumer.create(props.toConsumerConfig) val filterSpec = new Whitelist(props.topic) info("setup:start topic=%s for zk=%s and groupId=%s".format(props.topic, props.zookeeperConnect.getOrElse(""), props.groupId)) val stream = connector.createMessageStreamsByFilter(filterSpec, 1, new DefaultDecoder(), new DefaultDecoder()).head info("setup:complete topic=%s for zk=%s and groupId=%s".format(props.topic, props.zookeeperConnect.getOrElse(""), props.groupId)) def read(write: (Array[Byte]) => Unit) = { info("reading on stream now") for (messageAndTopic <- stream) { try { info("writing from stream") write(messageAndTopic.message()) info("written to stream") } catch { case e: Throwable => if (true) { // TODO: make this configurable; for now the failed message is logged and skipped error("Error processing message, skipping this message: ", e) } else { throw e } } } } def iterator() = stream.iterator() def close(): Unit = { connector.shutdown() } }
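// Hedged usage sketch (editor addition): how the consumer above is driven once a
// ConsumerProperties value is available. Its construction is left out here because the exact
// factory signature depends on the reactive-kafka version; ConsumerProperties[Array[Byte]] is
// an assumption matching the DefaultDecoder used above.
object KafkaConsumerSketch {
  import com.softwaremill.react.kafka.ConsumerProperties

  def run(props: ConsumerProperties[Array[Byte]]): Unit = {
    val consumer = new kafka.consumer.KafkaConsumer(props)
    try {
      // read() blocks, pulling messages from the stream and handing the raw bytes to the
      // callback; errors thrown by the callback are logged and the message is skipped.
      consumer.read(bytes => println(s"got ${bytes.length} bytes"))
    } finally {
      consumer.close()
    }
  }
}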
dbuschman7/reactive-kafka
src/main/scala/kafka/consumer/KafkaConsumer.scala
Scala
apache-2.0
1,447
package nl.dekkr.pagefetcher.actors import akka.actor.Actor import akka.event.Logging import nl.dekkr.pagefetcher.messages.{RemoveOldPages, StorePage} import nl.dekkr.pagefetcher.services.StorageService class PersistenceActor extends Actor { val log = Logging(context.system, this) def receive = { case RemoveOldPages(nrOfHours) => log.info(s"Cleaning everything older than $nrOfHours hours.") StorageService.deleteOlderThan(nrOfHours) case StorePage(url, content, raw) => StorageService.write(url, content, raw) } }
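// Hedged usage sketch (editor addition): starting the actor above and asking it to prune old
// pages. RemoveOldPages is assumed to carry the age in hours as an Int (it is only interpolated
// into a log message above); StorePage is omitted because its field types are not visible here.
object PersistenceActorSketch {
  import akka.actor.{ActorSystem, Props}
  import nl.dekkr.pagefetcher.actors.PersistenceActor
  import nl.dekkr.pagefetcher.messages.RemoveOldPages

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("pagefetcher-sketch")
    val persistence = system.actorOf(Props[PersistenceActor], name = "persistence")

    // Fire-and-forget: the actor logs and delegates to StorageService.deleteOlderThan.
    persistence ! RemoveOldPages(24)

    system.terminate() // on Akka 2.3 this would be system.shutdown()
  }
}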
dekkr/pagefetcher
src/main/scala/nl/dekkr/pagefetcher/actors/PersistenceActor.scala
Scala
mit
554
package org.talkingpuffin.apix case class SourceDetails(raw: String, url: Option[String], name: String) object SourceDetails { /** * From the “source” string, which oddly may contain either a simple string, such as “web,” * or an anchor tag with an href and a source name, extract: * <ol> * <li>the entire contents into `raw`, for backward compatibility * <li>a URL, if found, into `url` * <li>the source name into `name` * </ol> * */ def apply(text: String): SourceDetails = { // XML.loadString might have been used instead of this regex, but it throws exceptions because of the contents val anchorRegex = """<a.*href=["'](.*?)["'].*?>(.*?)</a>""".r val (url, name) = text match { case anchorRegex(u,s) => (Some(u), s) case _ => (None, text) } SourceDetails(text, url, name) } }
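// Hedged usage sketch (editor addition): the two shapes of Twitter "source" strings handled by
// SourceDetails.apply above. The anchor tag below is an illustrative example value.
object SourceDetailsSketch {
  import org.talkingpuffin.apix.SourceDetails

  def main(args: Array[String]): Unit = {
    // Plain source: no URL, the text itself becomes the name.
    assert(SourceDetails("web") == SourceDetails("web", None, "web"))

    // Anchor-tag source: the href and the link text are extracted.
    val anchor = """<a href="http://twitter.com/download/iphone" rel="nofollow">Twitter for iPhone</a>"""
    val details = SourceDetails(anchor)
    assert(details.url == Some("http://twitter.com/download/iphone"))
    assert(details.name == "Twitter for iPhone")
  }
}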
dcbriccetti/talking-puffin
common/src/main/scala/org/talkingpuffin/apix/SourceDetails.scala
Scala
mit
892
package lila.simul import play.api.data._ import play.api.data.Forms._ import play.api.data.validation.Constraints._ import chess.Mode import lila.common.Form._ final class DataForm { val clockTimes = (5 to 15 by 5) ++ (20 to 90 by 10) ++ (120 to 240 by 30) val clockTimeDefault = 20 val clockTimeChoices = options(clockTimes, "%d minute{s}") val clockIncrements = (0 to 2 by 1) ++ (3 to 7) ++ (10 to 30 by 5) ++ (40 to 60 by 10) ++ (90 to 180 by 30) val clockIncrementDefault = 60 val clockIncrementChoices = options(clockIncrements, "%d second{s}") val clockExtras = (0 to 60 by 10) ++ (90 to 120 by 30) val clockExtraChoices = options(clockExtras, "%d minute{s}") val clockExtraDefault = 0 def create = Form(mapping( "clockTime" -> numberIn(clockTimeChoices), "clockIncrement" -> numberIn(clockIncrementChoices), "clockExtra" -> numberIn(clockExtraChoices), "variants" -> list { number.verifying(Set(chess.variant.Standard.id, chess.variant.Chess960.id, chess.variant.KingOfTheHill.id, chess.variant.ThreeCheck.id, chess.variant.Antichess.id, chess.variant.Atomic.id, chess.variant.Horde.id) contains _) }.verifying("At least one variant", _.nonEmpty) )(SimulSetup.apply)(SimulSetup.unapply) ) fill SimulSetup( clockTime = clockTimeDefault, clockIncrement = clockIncrementDefault, clockExtra = clockExtraDefault, variants = List(chess.variant.Standard.id)) } case class SimulSetup( clockTime: Int, clockIncrement: Int, clockExtra: Int, variants: List[Int])
danilovsergey/i-bur
modules/simul/src/main/DataForm.scala
Scala
mit
1,550
/* * Copyright 2015 The kdtree authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.asoem.kdtree import scala.concurrent.duration.Duration import scala.concurrent.{Await, ExecutionContext, Future} /** * A KD-Tree implementation * * Tree construction is done in less than O([k-1]n log n) * * @param dim the dimension of the points in tree nodes * @param pointValueInput a sequence of point x value pairs to construct the tree from * @param forkJoinThreshold the threshold above which the construction will be done asynchronously * @tparam A the type of values the nodes will hold */ final class KDTree[A](val dim: Int, pointValueInput: Seq[Product2[HyperPoint, A]], forkJoinThreshold: Int) (implicit xc: ExecutionContext = ExecutionContext.global) extends Tree[KDNode[A]] with Immutable { require(dim > 0, "Dimension must be > 0") require(pointValueInput != null, "Argument 'pointValueInput' must not be null") /** The size of this tree which is the number of nodes accessible via root * */ val size = pointValueInput.length /** The root node * */ val root = { val splitAxisFunction = if (dim == 2) (depth: Int) => { depth & 1 } else (depth: Int) => { depth % dim } def createTree(sublist: Seq[Product2[HyperPoint, A]], depth: Int = 0): Option[KDNode[A]] = sublist.length match { case 0 => Option.empty case 1 => val head: Product2[HyperPoint, A] = sublist.head require(head._1.dim == dim, "Dimension mismatch") Some(LeafNode(head._1, head._2, splitAxisFunction(depth))) case sublistLength => val axis = splitAxisFunction(depth) val indexOfSplit = sublistLength / 2 val (left, rightWithMedian) = sublist.sortWith(_._1(axis) < _._1(axis)).splitAt(indexOfSplit) val newDepth: Int = depth + 1 val current: Product2[HyperPoint, A] = rightWithMedian.head require(current._1.dim == dim, "Dimension mismatch") val node: KDNode[A] = if (sublistLength > forkJoinThreshold) { val resultLeft = Future { createTree(left, newDepth) } val resultRight = Future { createTree(rightWithMedian.tail, newDepth) } Await.result(for { leftNode <- resultLeft rightNode <- resultRight } yield KDNode( current, axis, leftNode, rightNode ) , Duration.Inf) } else { KDNode( current, axis, createTree(left, newDepth), createTree(rightWithMedian.tail, newDepth) ) } Option(node) } createTree(pointValueInput) } /** * Filter all Nodes whose point is contained by the given Shape. * * The complexity of this algorithm is O(N) * * @param shape the Shape that defines the search range * @return a list of NNResult objects */ def filterRange(shape: Shape): List[KDNode[A]] = { require(shape != null, "shape must not be null") def search(nodeOption: Option[KDNode[A]]): List[KDNode[A]] = nodeOption match { case None => Nil case Some(node) => val prefix = if (shape.contains(node.point)) List(node) else Nil val postfix = if (!node.isLeaf) { search(node.leftChild) ::: search(node.rightChild) // TODO: Can this be optimized? } else Nil prefix ::: postfix } search(root) } /** * Filter all Nodes whose point is contained by the given HyperSphere. 
* * The complexity of this algorithm is O(log N) * * @param sphere the HyperSphere that defines the search range * @return a list of NNResult objects */ def filterRange(sphere: HyperSphere): List[NNResult[A]] = { require(sphere != null, "Argument 'range' must not be null") require(sphere.dim == dim, "Dimension of 'searchPoint' (%d) does not match dimension of this tree (%d).".format(sphere.dim, dim)) def search(nodeOption: Option[KDNode[A]]): List[NNResult[A]] = nodeOption match { case None => Nil case Some(node) => val dist = sphere.origin.distance(node.point) val prefix = if (dist <= sphere.radius) List(new NNResult(node, dist)) else Nil val postfix = if (!node.isLeaf) { val distance = sphere.origin(node.splitDim) - node.point(node.splitDim) val closeChild = if (distance <= 0) node.leftChild else node.rightChild val farChild = if (distance <= 0) node.rightChild else node.leftChild // search in the HyperRect containing searchPoint val resultCloseChild = search(closeChild) // TODO: is the close child always contained by the sphere? val resultFarChild = if (math.abs(distance) <= sphere.radius) search(farChild) else Nil resultCloseChild ::: resultFarChild } else Nil prefix ::: postfix } search(root) } def findNeighbours(searchPoint: HyperPoint, k: Int = 1): List[NNResult[A]] = { require(searchPoint != null, "Argument 'searchPoint' must not be null") require(searchPoint.dim == dim, "Dimension of 'searchPoint' (%d) does not match dimension of this tree (%d).".format(searchPoint.dim, dim)) require(k <= size, "k=%d must be <= size=%d".format(k, size)) if (k == 0) return Nil // list of result-nodes, ordered by distance var resultList = List[NNResult[A]]() def search(node: Option[KDNode[A]]) { if (node.isEmpty) return if (!node.get.isLeaf) { val translationInSplitDim = searchPoint(node.get.splitDim) - node.get.point(node.get.splitDim) val closeChild = if (translationInSplitDim <= 0) node.get.leftChild else node.get.rightChild val farChild = if (translationInSplitDim <= 0) node.get.rightChild else node.get.leftChild // search in the HyperRect containing searchPoint search(closeChild) // search in the HyperRect not containing searchPoint // if it intersects with searchRange if (resultList.isEmpty || new HyperSphere(searchPoint, resultList.last.distance).contains(farChild.get.point)) search(farChild) } val distanceToSearchPoint = searchPoint.distance(node.get.point) if (resultList.size < k) { val element = new NNResult(node.get, distanceToSearchPoint) resultList = element :: resultList } else if (distanceToSearchPoint < resultList.last.distance) { val element = new NNResult(node.get, distanceToSearchPoint) val insertAt = resultList.indexWhere(r => r.distance > distanceToSearchPoint) resultList = (resultList.take(insertAt) ::: element :: resultList.drop(insertAt)).take(k) } } search(root) resultList } } object KDTree { def defaultThreshold(size: Int) = math.max(size, 1000) / Runtime.getRuntime.availableProcessors def apply[A](dim: Int, pointValueTuples: Seq[Product2[HyperPoint, A]]): KDTree[A] = { new KDTree[A](dim, pointValueTuples, defaultThreshold(pointValueTuples.length)) } /** * Creates a new KDTree from `pointValueTuples`. * Assumes that the dimension of the first point in `pointValueTuples` is the desired dimension of the tree. 
* Uses the DEFAULT_THRESHOLD for parallelization * * @param pointValueTuples the point x value mappings the nodes will hold * @tparam A The type of the values the nodes will hold * @return a new KDTree of the assumed dimension */ def apply[A](pointValueTuples: Seq[Product2[HyperPoint, A]]): KDTree[A] = { if (pointValueTuples.isEmpty) throw new IllegalArgumentException("Cannot derive tree dimension for an empty sequence") else KDTree[A](pointValueTuples.head._1.dim, pointValueTuples) } def empty[A](dim: Int): KDTree[A] = new KDTree[A](dim, Nil, 0) }
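// Hedged usage sketch (editor addition): building a small 2-D tree and running the query methods
// defined above. It assumes HyperPoint values can be built from two doubles via an apply method;
// the exact constructor may differ in the actual library.
object KDTreeSketch {
  import org.asoem.kdtree._

  def main(args: Array[String]): Unit = {
    val points = Seq(
      HyperPoint(0.0, 0.0) -> "origin",
      HyperPoint(1.0, 1.0) -> "a",
      HyperPoint(2.0, 3.0) -> "b",
      HyperPoint(5.0, 5.0) -> "c")

    // dim = 2; the companion picks a fork/join threshold automatically.
    val tree = KDTree(2, points)

    // k-nearest-neighbour query around (1.2, 1.1).
    val nearest = tree.findNeighbours(HyperPoint(1.2, 1.1), k = 2)
    println(nearest.map(_.distance))

    // Range query: all nodes within radius 3.0 of the origin.
    val inRange = tree.filterRange(new HyperSphere(HyperPoint(0.0, 0.0), 3.0))
    println(s"${inRange.size} nodes inside the sphere")
  }
}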
hoesler/kdtree
src/main/scala/org/asoem/kdtree/KDTree.scala
Scala
apache-2.0
8,661
package org.jetbrains.plugins.scala.failed.typeInference import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter /** * @author Roman.Shein * @since 01.04.2016. */ class SetConformanceTest extends ScalaLightCodeInsightFixtureTestAdapter { override protected def shouldPass: Boolean = false def testSCL4941(): Unit = checkTextHasNoErrors( s""" |def f(collect: Iterable[Int]): Unit = { | collect.zipWithIndex.foldLeft(mutable.LinkedHashMap.empty[Int, Set[Int]]) { | case (m, (t1, _)) => m += (t1 -> { | val s = m.getOrElse(t1, mutable.LinkedHashSet.empty) | s | }) | } |} |//true """.stripMargin) def testSCL11139(): Unit = checkTextHasNoErrors( s""" |import scala.reflect.Manifest |object App { | def tryCast[T](o: Any)(implicit manifest: Manifest[T]): Option[T] = { | val clazz = manifest.runtimeClass.asInstanceOf[Class[T]] | if (clazz.isAssignableFrom(o.getClass)) { | Some(o.asInstanceOf[T]) | } else { | None | } | } | | def main(arg: Array[String]) = { | val text: String = Seq("a", 1) | .flatMap(tryCast[String]) | .mkString | println(text) | } |} |//true """.stripMargin) //component(3) = "thing" line makes the test fail with some exception from test framework, it has too many errors def testSCL13432(): Unit = checkTextHasNoErrors( s""" |import scala.reflect.ClassTag |import scala.collection.mutable | |def component[T: ClassTag]: mutable.HashMap[Int, T] = ??? | |component.update(3, "thing") |//component(3) = "thing" | |//true """.stripMargin) def testSCL9738(): Unit = { checkTextHasNoErrors( s""" |sealed trait FeedbackReason |case object CostReason extends FeedbackReason |case object BugsReason extends FeedbackReason |case object OtherReason extends FeedbackReason | |object FeedbackTypes { | def asMap(): Map[FeedbackReason, String] = { | val reasons = Map( | CostReason -> "It's too expensive", | BugsReason -> "It's buggy" | ) | reasons ++ Map(OtherReason -> "Some other reason") | } |} """.stripMargin) } }
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/failed/typeInference/SetConformanceTest.scala
Scala
apache-2.0
2,485
package test import org.scalatest.FunSuite import scala.offheap._ @data class ArrayContainer(var arr: Array[Int]) class ArraySuite extends FunSuite { implicit val alloc = malloc test("uninit") { val arr = Array.uninit[Int](10) assert(arr.nonEmpty) assert(arr.size == 10) } test("uninit empty") { assert(Array.uninit[Int](0).isEmpty) } test("vararg") { var arr = Array(1, 2, 3, 4) assert(arr.nonEmpty) assert(arr.size == 4) assert(arr(0) == 1) assert(arr(1) == 2) assert(arr(2) == 3) assert(arr(3) == 4) } test("fill") { var arr = Array.fill(10)(42) assert(arr.nonEmpty) assert(arr.size == 10) arr.foreach { v => assert(v == 42) } } test("fill empty") { assert(Array.fill(0)(0).isEmpty) } test("map") { val arr = Array(1, 2, 3, 4) val narr = arr.map(v => (v + 1).toLong) assert(narr.nonEmpty) assert(narr.size == 4) assert(narr(0) == 2L) assert(narr(1) == 3L) assert(narr(2) == 4L) assert(narr(3) == 5L) } test("map empty") { assert(Array.empty[Int].map(_ * 2).isEmpty) } test("transform") { val arr = Array(1, 2, 3, 4) val narr = arr.transform(v => v * 2) val exp = Array(2, 4, 6, 8) assert(arr == narr) assert(narr.sameElements(exp)) } test("transform empty") { assert(Array.empty[Int].transform(_ * 2).isEmpty) } test("read out of bounds") { val arr = Array(1, 2, 3, 4) intercept[IndexOutOfBoundsException] { arr(-1) } intercept[IndexOutOfBoundsException] { arr(4) } } test("write out of bounds") { val arr = Array(1, 2, 3, 4) intercept[IndexOutOfBoundsException] { arr(-1) = 42 } intercept[IndexOutOfBoundsException] { arr(4) = 42 } } test("empty read out of bounds") { intercept[IndexOutOfBoundsException] { Array.empty[Int].apply(0) } } test("empty write out of bounds") { intercept[IndexOutOfBoundsException] { Array.empty[Int].update(0, 42) } } test("copy") { val arr1 = Array(0, 0, 0, 0, 0, 0, 0, 0) val arr2 = Array(1, 1, 1, 1, 1, 1, 1, 1) Array.copy(arr2, 1, arr1, 2, 3) val arr3 = Array(0, 0, 1, 1, 1, 0, 0, 0) for (i <- 0 to 7) assert(arr1(i) == arr3(i)) } test("copy out of bounds") { val arr1 = Array(0, 0, 0) val arr2 = Array(1, 1, 1, 1, 1, 1, 1, 1) intercept[IndexOutOfBoundsException] { Array.copy(arr2, 1, arr1, 2, 3) } } test("copy empty") { val arr1 = Array(0, 0, 0, 0) val arr2 = Array.empty[Int] intercept[IllegalArgumentException] { Array.copy(arr2, 0, arr1, 0, 4) } intercept[IllegalArgumentException] { Array.copy(arr1, 0, arr2, 0, 4) } } test("arrays can be fields in data classes") { val arr1 = Array(1, 2, 3) val arr2 = Array(2, 3, 4) val cont = ArrayContainer(arr1) assert(cont.arr == arr1) cont.arr = arr2 assert(cont.arr == arr2) } test("empty array is empty") { assert(Array.empty[Int].isEmpty) assert(!Array.empty[Int].nonEmpty) } test("non-empty array is not empty") { assert(Array(1).nonEmpty) assert(!Array(1).isEmpty) } test("offheap to onheap") { val arr = Array(1, 2, 3, 4, 5) val jarr = arr.toArray assert(jarr.length == 5) val jarr2 = scala.Array(1, 2, 3, 4, 5) for (i <- 0 to 4) assert(jarr(i) == jarr2(i)) } test("empty offheap to onheap") { assert(Array.empty[Int].toArray.isEmpty) } test("onheap to offheap") { val jarr = scala.Array(1, 2, 3, 4, 5) val arr = Array.fromArray(jarr) assert(arr.length == 5) val arr2 = Array(1, 2, 3, 4, 5) for (i <- 0 to 4) assert(arr(i) == arr2(i)) } test("empty onheap to offheap") { assert(Array.fromArray(scala.Array.empty[Int]).isEmpty) } test("clone") { val arr = Array(1, 2, 3, 4, 5) val arr2 = arr.clone assert(arr2.size == 5) for (i <- 0 to 4) assert(arr(i) == arr2(i)) } test("clone empty") { assert(Array.empty[Int].clone.isEmpty) } test("empty size") {
assert(Array.empty[Int].size == 0) } test("filter") { val arr = Array(1, 2, 3, 4, 5, 7, 9) val narr = arr.filter(x => x % 2 == 0) assert(narr.nonEmpty) assert(narr.size == 2) assert(narr(0) == 2) assert(narr(1) == 4) val narr2 = arr.filter(x => x == 3) assert(narr2.nonEmpty) assert(narr2.size == 1) assert(narr2(0) == 3) } test("filter no matching predicate") { val arr = Array(1, 2, 3, 4) val narr = arr.filter(x => x > 10) assert(narr.isEmpty) assert(narr.length == 0) } test("filter empty") { assert(Array.empty[Int].filter(_ => true).isEmpty) assert(Array.empty[Int].filter(_ => false).isEmpty) } test("foldLeft") { val arr = Array(1, 2, 3) assert(arr.foldLeft[Int](0)((acc, el) => (acc + el) * el) == 27) } test("foldLeft empty") { assert(Array.empty[Int].foldLeft[Int](3)((_, _) => 1) == 3) } test("foldRight") { val arr = Array(1, 2, 3) assert(arr.foldRight[Int](0)((el, acc) => (acc + el) * el) == 23) } test("foldRight empty") { assert(Array.empty[Int].foldRight[Int](5)((_, _) => 1) == 5) } test("forall") { val arr = Array(1, 3, 5, 7) assert(arr.forall(_ < 10)) assert(arr.forall(x => x % 2 == 1)) assert(!arr.forall(_ < 6)) } test("forall on empty array returns true") { assert(Array.empty[Int].forall(_ < 10)) assert(Array.empty[Double].forall(_ => false)) } test("exists") { val arr = Array(1, 3, 5, 7) assert(arr.exists(_ < 10)) assert(arr.exists(_ == 5)) assert(!arr.exists(x => x % 2 == 0)) } test("exists on empty array returns false") { assert(!Array.empty[Int].exists(_ < 10)) assert(!Array.empty[Double].exists(_ => true)) } test("sameElements") { var arr1 = Array(1, 3, 5, 7) val arr2 = Array(1, 3, 5, 7) val arr3 = Array(1, 3, 5, 8) val arr4 = Array(1, 3, 5) assert(arr1.sameElements(arr1)) assert(arr1.sameElements(arr2)) assert(!arr1.sameElements(arr3)) assert(!arr1.sameElements(arr4)) assert(!arr1.sameElements(Array.empty[Int])) assert(!Array.empty[Int].sameElements(arr1)) assert(Array.empty[Int].sameElements(Array.empty[Int])) } test("startsWith") { val arr = Array(1, 3, 5, 7) val arr2 = Array(1, 3) val arr3 = Array(5, 7) val arr4 = Array(1, 5) assert(arr.startsWith(arr)) assert(arr.startsWith(arr2)) assert(arr.startsWith(arr3, 2)) assert(!arr.startsWith(arr3, 3)) assert(!arr.startsWith(arr4)) assert(!arr4.startsWith(arr)) assert(arr.startsWith(Array.empty[Int])) assert(!Array.empty[Int].startsWith(arr)) intercept[IndexOutOfBoundsException] { arr.startsWith(arr2, -1) } } test("endsWith") { val arr = Array(1, 3, 5, 7) val arr2 = Array(3, 5, 7) val arr3 = Array(1, 5) assert(arr.endsWith(arr)) assert(arr.endsWith(arr2)) assert(!arr.endsWith(arr3)) assert(!arr3.endsWith(arr)) assert(arr.endsWith(Array.empty[Int])) assert(!Array.empty[Int].endsWith(arr)) } }
adamwy/scala-offheap
tests/src/test/scala/ArraySuite.scala
Scala
bsd-3-clause
7,101
/* * Copyright 2007-2011 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb package record package field import scala.xml._ import net.liftweb.common._ import net.liftweb.http.{S} import json._ import net.liftweb.util._ import Helpers._ import S._ trait DoubleTypedField extends NumericTypedField[Double] { def setFromAny(in: Any): Box[Double] = setNumericFromAny(in, _.doubleValue) def setFromString(s: String): Box[Double] = if(s == null || s.isEmpty) { if(optional_?) setBox(Empty) else setBox(Failure(notOptionalErrorMessage)) } else { setBox(tryo(java.lang.Double.parseDouble(s))) } def defaultValue = 0.0 def asJValue: JValue = valueBox.map(JDouble) openOr (JNothing: JValue) def setFromJValue(jvalue: JValue) = jvalue match { case JNothing|JNull if optional_? => setBox(Empty) case JDouble(d) => setBox(Full(d)) case JInt(i) => setBox(Full(i.toDouble)) case other => setBox(FieldHelpers.expectedA("JDouble", other)) } } class DoubleField[OwnerType <: Record[OwnerType]](rec: OwnerType) extends Field[Double, OwnerType] with MandatoryTypedField[Double] with DoubleTypedField { def this(rec: OwnerType, value: Double) = { this(rec) set(value) } def owner = rec } class OptionalDoubleField[OwnerType <: Record[OwnerType]](rec: OwnerType) extends Field[Double, OwnerType] with OptionalTypedField[Double] with DoubleTypedField { def this(rec: OwnerType, value: Box[Double]) = { this(rec) setBox(value) } def owner = rec }
lzpfmh/framework-2
persistence/record/src/main/scala/net/liftweb/record/field/DoubleField.scala
Scala
apache-2.0
2,174
package com.socrata.testcommon package mocks package util import java.util.Collections import javax.servlet._ import javax.servlet.http._ import scala.collection.JavaConverters._ import UnusedSugarCommon._ // scalastyle:off number.of.methods class AugmentedServletRequest private (private val headers: Map[String, Seq[String]], private val params: Map[String, String], private val method: String = "GET") extends HttpServletRequest { def getQueryString(): String = params map { case (k, v) => s"$k=$v" } mkString "&" def getHeaderNames(): java.util.Enumeration[String] = Collections.enumeration(headers.keys.toSeq.asJava) def getHeader(name: String): String = headers.get(name).map(_.head).orNull def getHeaders(name: String): java.util.Enumeration[String] = Collections.enumeration(headers.asJava.get(name).asJava) def getIntHeader(name: String): Int = getHeader(name).toInt def getDateHeader(name: String): Long = getHeader(name).toLong def getMethod(): String = method // Whee Boilerplate! def authenticate(ignored: HttpServletResponse): Boolean = Unused def changeSessionId(): String = Unused def getAsyncContext(): AsyncContext = throw new UnsupportedOperationException() def getAttribute(x$1: String): Object = Unused def getAttributeNames(): java.util.Enumeration[String] = Unused def getAuthType(): String = Unused def getCharacterEncoding(): String = Unused def getContentLength(): Int = Unused def getContentLengthLong(): Long = Unused def getContentType(): String = Unused def getContextPath(): String = Unused def getCookies(): Array[Cookie] = Unused def getDispatcherType(): DispatcherType = DispatcherType.ASYNC def getInputStream(): ServletInputStream = StaticServletInputStream() def getLocalAddr(): String = Unused def getLocalName(): String = Unused def getLocalPort(): Int = Unused def getLocale(): java.util.Locale = java.util.Locale.getDefault() def getLocales(): java.util.Enumeration[java.util.Locale] = Unused def getParameter(x$1: String): String = Unused def getParameterMap(): java.util.Map[String,Array[String]] = Map.empty.asJava def getParameterNames(): java.util.Enumeration[String] = Unused def getParameterValues(x$1: String): Array[String] = Unused def getPart(x$1: String): Part = throw new UnsupportedOperationException() def getParts(): java.util.Collection[Part] = Unused: java.util.List[Part] def getPathInfo(): String = Unused def getPathTranslated(): String = Unused def getProtocol(): String = Unused def getReader(): java.io.BufferedReader = Unused def getRealPath(x$1: String): String = Unused def getRemoteAddr(): String = Unused def getRemoteHost(): String = Unused def getRemotePort(): Int = Unused def getRemoteUser(): String = Unused def getRequestDispatcher(x$1: String): RequestDispatcher = throw new UnsupportedOperationException() def getRequestURI(): String = Unused def getRequestURL(): StringBuffer = Unused def getRequestedSessionId(): String = Unused def getScheme(): String = Unused def getServerName(): String = Unused def getServerPort(): Int = Unused def getServletContext(): ServletContext = throw new UnsupportedOperationException() def getServletPath(): String = Unused def getSession(): HttpSession = throw new UnsupportedOperationException() def getSession(x$1: Boolean): HttpSession = throw new UnsupportedOperationException() def getUserPrincipal(): java.security.Principal = throw new UnsupportedOperationException() def isAsyncStarted(): Boolean = Unused def isAsyncSupported(): Boolean = Unused def isRequestedSessionIdFromCookie(): Boolean = Unused def 
isRequestedSessionIdFromURL(): Boolean = Unused def isRequestedSessionIdFromUrl(): Boolean = Unused def isRequestedSessionIdValid(): Boolean = Unused def isSecure(): Boolean = Unused def isUserInRole(x$1: String): Boolean = Unused def login(x$1: String, x$2: String): Unit = {} def logout(): Unit = {} def removeAttribute(x$1: String): Unit = {} def setAttribute(x$1: String, x$2: Any): Unit = {} def setCharacterEncoding(x$1: String): Unit = {} def startAsync(): AsyncContext = throw new UnsupportedOperationException() def startAsync(x$1: ServletRequest, x$2: ServletResponse): AsyncContext = throw new UnsupportedOperationException() def upgrade[T](x$1: Class[T]): T = throw new UnsupportedOperationException() } // scalastyle:on number.of.methods object AugmentedServletRequest { private def listify(m: Map[String, String]): Map[String, Seq[String]] = m.map { case (k, v) => k -> Seq(v) } private[testcommon] def apply(headers: Map[String, String], params: Map[String, String]): AugmentedServletRequest = new AugmentedServletRequest(listify(headers), params) private[testcommon] def apply(header: (String, String), param: (String, String)): AugmentedServletRequest = apply(Map(header), Map(param)) private[testcommon] def apply(): AugmentedServletRequest = apply(Map.empty[String, String], Map.empty[String, String]) }
socrata-platform/socrata-test-common
src/main/scala/com.socrata.testcommon/mocks/util/AugmentedServletRequest.scala
Scala
apache-2.0
5,150
package com.softwaremill.macwire import org.scalatest.matchers.ShouldMatchers import org.scalatest.FlatSpec import io.Source import tools.reflect.ToolBoxError class CompileTests extends FlatSpec with ShouldMatchers { val GlobalImports = "import com.softwaremill.macwire.MacwireMacros._\\n\\n" val DirectiveRegexp = "#include ([a-zA-Z]+)".r val EmptyResult = "\\n\\n()" def ambiguousResMsg(depClassName: String): String = s"Found multiple values of type [$depClassName]" def valueNotFound(depClassName: String): String = s"Cannot find a value of type: [$depClassName]" type CompilationResult = List[String] val success: CompilationResult = Nil def compileErr(messageParts: String*): CompilationResult = List.apply(messageParts: _*) val tests = List( ("simpleValsOkInTrait", success), ("simpleValsOkInObject", success), ("simpleValsOkInClass", success), ("simpleValsErrorMissingValue", compileErr(valueNotFound("B"))), ("simpleValsErrorDuplicateValue", compileErr(ambiguousResMsg("B"), "theB1", "theB2")), ("simpleDefsOkInTrait", success), ("simpleLazyValsOkInTrait", success), ("simpleWithAbstractOk", success), ("simpleValsReferenceWithAscriptionOk", success), ("simpleLazyValsNotInOrderOk", success), ("simpleValsMultipleParameterLists", success), ("simpleValsImplicitParameterLists", success), ("classesWithTraitsLazyValsOkInTrait", success), ("inheritanceSimpleLazyValsOkInTraits", success), ("inheritanceSimpleDefsOkInTraits", success), ("inheritanceTwoLevelSimpleLazyValsOkInTraits", success), ("inheritanceDoubleSimpleLazyValsOkInTraits", success), ("inheritanceClassesWithTraitsLazyValsOkInTraits", success), ("simpleWithAbstractScopeOk", success), ("methodSingleParamOk", success), ("methodParamsOk", success), ("methodParamsInApplyOk", success), ("methodMixedOk", success), ("wiredSimple", success), ("wiredLazy", success), ("wiredPrimitive", success), ("wiredWithWire", success), ("wiredInherited", success), ("wiredDefs", success), ("wiredFromClass", success), ("wiredClassWithTypeParameters", success), // explicit param should not be resolved with implicit value when dependency cannot be found during plain, old regular lookup ("explicitDepsNotWiredWithImplicitVals", compileErr(valueNotFound("A"))), // non-implicit params should be resolved with implicit values if are in scope ("explicitDepsWiredWithImplicitValsFromMethodScope", compileErr(ambiguousResMsg("A"), "dependency", "implicitDependencyA")), ("explicitDepsWiredWithImplicitValsFromEnclosingModuleScope", success), ("explicitDepsWiredWithImplicitValsFromParentsScope", success), // implicit params should be resolved with implicit values or defs ("implicitDepsWiredWithImplementedImplicitVals", success), ("implicitDepsWiredWithImplicitDefs", success), ("implicitDepsWiredWithImplicitVals", success), ("implicitDepsWiredWithImplicitValsFromMethodScope", compileErr(ambiguousResMsg("Dependency"), "dependency", "implicitDependency")), ("implicitDepsWiredWithImplicitValsFromEnclosingModuleScope", success), ("implicitDepsWiredWithImplicitValsFromParentsScope", success), // implicit params should be resolved with regular values ("implicitDepsWiredWithExplicitVals", success), ("implicitDepsWiredWithExplicitValsFromEnclosingModuleScope", success), ("implicitDepsWiredWithExplicitValsFromParentsScope", success), // dependency resolution should abort compilation when there are ambiguous dependencies in scope ("implicitDepsNotWiredWithExplicitAndImplicitValsInEnclosingClassScope", compileErr(ambiguousResMsg("Dependency"), "regularDependency", "implicitDependency")), 
("implicitDepsNotWiredWithExplicitAndImplicitValsInParentsScope", compileErr(ambiguousResMsg("Dependency"), "regularDependency", "implicitDependency")), ("implicitDepsNotWiredWithoutAnyValsInScope", compileErr(valueNotFound("Dependency"))), ("diamondInheritance", success), ("simpleWireWithImplicits", success), ("simpleWireWithImplicitsErrorDuplicateValue", compileErr(ambiguousResMsg("B"), "B.defaultB", "bDep")), ("taggedOk", success), ("taggedPrimitiveOk", success), ("taggedErrorNoValueWithTag", compileErr(valueNotFound("com.softwaremill.macwire.Tagging.@@[Berry,Blue]"))), ("multipleMethodParametersFail", compileErr(ambiguousResMsg("A"), "a1", "a2")), ("anonFuncArgsWiredOk", success), ("anonFuncAndMethodsArgsWiredOk", success), ("nestedAnonFuncsWiredOk", success), ("nestedMethodsWiredOk", success), ("nestedMethodsWiredFail", compileErr(ambiguousResMsg("A"), "outerA", "innerA")), ("nestedWithManyMatchingParamsWiredFail", compileErr(ambiguousResMsg("A"), "a1", "a2", "a3")), ("methodWithWiredWithinIfThenElseOk", success), ("methodWithWiredWithinPatternMatchOk", success), ("methodWithSingleImplicitParamOk", success), ("methodWithTaggedParamsOk", success), ("methodWithTaggedParamsNotFoundFail", compileErr(valueNotFound("com.softwaremill.macwire.Tagging.@@[Berry,Blue]"))), ("methodWithTaggedParamsAmbiguousFail", compileErr(ambiguousResMsg("com.softwaremill.macwire.Tagging.@@[Berry,Blue]"), "blueberryArg1", "blueberryArg2")) ) for ((testName, expectedErrors) <- tests) addTest(testName, expectedErrors) addTest("simpleValsOkInTraitExtendingMacwire", Nil, "/* Note no additional import needed */") def addTest(testName: String, expectedResult: CompilationResult, imports: String = GlobalImports) { testName should (if (expectedResult == success) "compile & run" else "cause a compile error") in { import scala.reflect.runtime._ val cm = universe.runtimeMirror(getClass.getClassLoader) import scala.tools.reflect.ToolBox val tb = cm.mkToolBox() val source = loadTest(testName, imports) try { tb.eval(tb.parse(source)) if (expectedResult != success) { fail(s"Expected the following compile errors: $expectedResult") } } catch { case e: ToolBoxError => { if (expectedResult == success) { fail(s"Expected compilation & evaluation to be successful, but got an error: ${e.message}", e) } else { expectedResult.foreach(expectedError => e.message should include (expectedError)) } } } } } def loadTest(name: String, imports: String) = imports + resolveDirectives(loadResource(name)) + EmptyResult def loadResource(name: String) = { val resource = this.getClass.getResourceAsStream("/" + name) if (resource == null) throw new IllegalArgumentException(s"Cannot find resource: $name") Source.fromInputStream(resource).getLines().mkString("\\n") } def resolveDirectives(in: String): String = { DirectiveRegexp.findAllMatchIn(in).foldLeft(in)((acc, m) => { val includeName = m.group(1) val replacement = loadResource(includeName) acc.replaceAll("#include " + includeName + "(?!\\\\w)", replacement) }) } }
rcirka/macwire
tests/src/test/scala/com/softwaremill/macwire/CompileTests.scala
Scala
apache-2.0
7,095
package sbt

import java.io.File
import java.util.regex.{ Pattern, PatternSyntaxException }

import complete.Parser
import complete.DefaultParsers._

object CommandUtil {

  def readLines(files: Seq[File]): Seq[String] =
    files flatMap (line => IO.readLines(line)) flatMap processLine

  def processLine(s: String) = {
    val trimmed = s.trim
    if (ignoreLine(trimmed)) None else Some(trimmed)
  }

  def ignoreLine(s: String) = s.isEmpty || s.startsWith("#")

  private def canRead = (_: File).canRead

  def notReadable(files: Seq[File]): Seq[File] = files filterNot canRead

  def readable(files: Seq[File]): Seq[File] = files filter canRead

  // slightly better fallback in case of older launcher
  def bootDirectory(state: State): File =
    try {
      state.configuration.provider.scalaProvider.launcher.bootDirectory
    } catch {
      case e: NoSuchMethodError => new File(".").getAbsoluteFile
    }

  def aligned(pre: String, sep: String, in: Seq[(String, String)]): Seq[String] =
    if (in.isEmpty) Nil
    else {
      val width = in.map(_._1.length).max
      in.map { case (a, b) => (pre + fill(a, width) + sep + b) }
    }

  def fill(s: String, size: Int) = s + " " * math.max(size - s.length, 0)

  def withAttribute[T](s: State, key: AttributeKey[T], ifMissing: String)(f: T => State): State =
    (s get key) match {
      case None      => s.log.error(ifMissing); s.fail
      case Some(nav) => f(nav)
    }

  def singleArgument(exampleStrings: Set[String]): Parser[String] = {
    val arg = (NotSpaceClass ~ any.*) map { case (ns, s) => (ns +: s).mkString }
    token(Space) ~> token(arg examples exampleStrings)
  }

  def detail(selected: String, detailMap: Map[String, String]): String =
    detailMap.get(selected) match {
      case Some(exactDetail) => exactDetail
      case None =>
        try {
          val details = searchHelp(selected, detailMap)
          if (details.isEmpty)
            "No matches for regular expression '" + selected + "'."
          else
            layoutDetails(details)
        } catch {
          case pse: PatternSyntaxException =>
            sys.error("Invalid regular expression (java.util.regex syntax).\n" + pse.getMessage)
        }
    }

  def searchHelp(selected: String, detailMap: Map[String, String]): Map[String, String] = {
    val pattern = Pattern.compile(selected, HelpPatternFlags)
    detailMap flatMap {
      case (k, v) =>
        val contentMatches = Highlight.showMatches(pattern)(v)
        val keyMatches = Highlight.showMatches(pattern)(k)
        val keyString = Highlight.bold(keyMatches getOrElse k)
        val contentString = contentMatches getOrElse v
        if (keyMatches.isDefined || contentMatches.isDefined)
          (keyString, contentString) :: Nil
        else
          Nil
    }
  }

  def layoutDetails(details: Map[String, String]): String =
    details.map { case (k, v) => k + "\n\n " + v } mkString ("\n", "\n\n", "\n")

  final val HelpPatternFlags = Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE
}
pdalpra/sbt
main/command/src/main/scala/sbt/CommandUtil.scala
Scala
bsd-3-clause
2,981
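The `aligned`/`fill` pair in the sbt CommandUtil entry above does simple column padding; the sketch below re-implements just those two pure helpers so the behaviour can be run without an sbt classpath (the object name and sample rows are made up for illustration).

// Illustrative sketch only: re-implements the pure `aligned`/`fill` helpers from
// sbt's CommandUtil above so they can be run standalone.
object AlignedSketch {
  def fill(s: String, size: Int): String = s + " " * math.max(size - s.length, 0)

  def aligned(pre: String, sep: String, in: Seq[(String, String)]): Seq[String] =
    if (in.isEmpty) Nil
    else {
      val width = in.map(_._1.length).max
      in.map { case (a, b) => pre + fill(a, width) + sep + b }
    }

  def main(args: Array[String]): Unit = {
    // Each key is padded to the widest key before the description is appended
    aligned("  ", "  ", Seq("help" -> "Displays this message", "exit" -> "Quits sbt"))
      .foreach(println)
  }
}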
package com.karasiq.shadowcloud.test.utils

import java.io.FileNotFoundException
import java.nio.file.Files

import scala.concurrent.Future
import scala.util.Try

import akka.stream.IOResult
import akka.stream.scaladsl.{FileIO, Source}
import akka.util.ByteString

object ResourceUtils {
  def getPathOption(name: String): Option[java.nio.file.Path] = {
    Try(new java.io.File(getClass.getClassLoader.getResource(name).toURI).toPath).toOption
  }

  def getPath(name: String): java.nio.file.Path = {
    getPathOption(name).getOrElse(throw new FileNotFoundException(name))
  }

  def toBytes(name: String): ByteString = {
    getPathOption(name).fold(ByteString.empty)(path ⇒ ByteString.fromArrayUnsafe(Files.readAllBytes(path)))
  }

  def toString(name: String): String = {
    toBytes(name).utf8String
  }

  def toStream(name: String): Source[ByteString, Future[IOResult]] = {
    getPathOption(name) match {
      case Some(path) ⇒ FileIO.fromPath(path)
      case None ⇒ Source.empty[ByteString].mapMaterializedValue(_ ⇒ Future.successful(IOResult.createSuccessful(0L)))
    }
  }
}
Karasiq/shadowcloud
utils/test/.jvm/src/main/scala/com/karasiq/shadowcloud/test/utils/ResourceUtils.scala
Scala
apache-2.0
1,116
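A hedged usage sketch for the ResourceUtils helpers above; "example.txt" is a hypothetical classpath resource name, not something defined in that repository.

// Illustrative sketch: reading test fixtures with the ResourceUtils helpers above.
// "example.txt" is an assumed resource under src/test/resources.
import com.karasiq.shadowcloud.test.utils.ResourceUtils

object ResourceUtilsUsage extends App {
  // Option-returning lookup lets the caller decide what a missing resource means
  ResourceUtils.getPathOption("example.txt") match {
    case Some(path) => println(s"Found fixture at $path")
    case None       => println("Fixture not on the classpath")
  }

  // toBytes/toString fall back to an empty result instead of throwing
  val text = ResourceUtils.toString("example.txt")
  println(s"Fixture is ${text.length} characters long")
}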
/** * CPSTextInterpreter - parses and interprets the CPSText DSL. * Copyright (C) 2011 Max Leuthaeuser * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package de.qualitune.parser import java.net.InetAddress import scala.util.parsing.combinator._ import de.qualitune.ast.cps.CPSType._ import de.qualitune.ast.cps.{CPS, CPSType} import de.qualitune.ast.{CPSProgram, Context} import de.qualitune.ast.variable.{VariableDeclAccessType, EmptyVariableDecl, InitVariableDecl, VariableDecl} import de.qualitune.ast.rule.{ActivationRuleBinding, ActivationRuleVariable, ActivationRule, Settings} import de.qualitune.ast.role._ import de.qualitune.ast.callable.{Behavior, Operation} import scala.ScalaObject /** * Parser for parsing CPSText and creating an instance of the corresponding AST. * * @author Max Leuthaeuser * @since 22.11.2011 */ object CPSTextParser extends JavaTokenParsers { // ignore whitespaces and all c-style comments protected override val whiteSpace = """(\\s|//.*|(?m)/\\*(\\*(?!/)|[^*])*\\*/)+""".r def cpsProgram: Parser[CPSProgram] = imports ~ robots ~ contexts ^^ { case i ~ r ~ c => CPSProgram(i, r, c) } def imports: Parser[List[String]] = opt("import {" ~> rep1(importItem) <~ "}") ^^ { _.getOrElse(List[String]()) } def importItem: Parser[String] = """[a-zA-Z_\\.\\*]+""".r <~ ";" def robots: Parser[List[CPS]] = rep(robot) def robot: Parser[CPS] = cpsType ~ ident ~ "IP" ~ ip ~ "PORT" ~ port ^^ { case t ~ n ~ "IP" ~ i ~ "PORT" ~ p => CPS(t, n, i, p) } def cpsType: Parser[CPSType] = ("Nao" | "Mindstorm") ^^ { case "Nao" => CPSType.Nao case "Mindstorm" => CPSType.Mindstorm } def ip: Parser[String] = ipv4Address | ipv6Address def ipv4Address: Parser[String] = """[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}""".r ^^ { InetAddress.getByName(_).toString.replace("/", "") } def ipv6Address: Parser[String] = """[:%a-z0-9]+""".r ^^ { InetAddress.getByName(_).toString.replace("/", "") } def port: Parser[Int] = decimalNumber ^^ { case s: String => { val p = s.toInt if (p <= 0) throw new Exception("Invalid Port: " + s) p } } def contexts: Parser[List[Context]] = rep1(context) def codeLine: Parser[String] = """[^\\{^\\}^;]*""".r ^^ { _.trim } def codeBlock: Parser[String] = "{" ~> expr <~ "}" | "{}" def expr: Parser[String] = """[^\\{^\\}]*;{0,1}""".r ~ opt(parens) ^^ { case a ~ b => { val body = b.getOrElse("") if (body.isEmpty) { a } else { a + "{" + body + "}" } } } def parens: Parser[String] = "{" ~> expr <~ "}" def activationRuleVariable: Parser[ActivationRuleVariable] = ident ~ ident ^^ { case r ~ n => ActivationRuleVariable(r, n) } def activationRuleVariables: Parser[List[ActivationRuleVariable]] = rep1(activationRuleVariable <~ ";") def activationRuleBinding: Parser[ActivationRuleBinding] = ident ~ "->" ~ ident ^^ { case n ~ "->" ~ r => ActivationRuleBinding(n, r) } def activationRuleBindings: Parser[List[ActivationRuleBinding]] = rep1(activationRuleBinding <~ ";") def activationRule: Parser[ActivationRule] = "activate for 
{" ~ activationRuleVariables ~ "} when" ~ "{" ~ codeLine ~ "} with bindings {" ~ activationRuleBindings ~ "}" ~ settings ^^ { case "activate for {" ~ av ~ "} when" ~ "{" ~ c ~ "} with bindings {" ~ ab ~ "}" ~ s => ActivationRule(av, c, ab, s) } def settings: Parser[Settings] = "with settings {" ~> setting <~ "}" def setting: Parser[Settings] = interval ~ after ~ continuously ^^ { case i ~ t ~ c => Settings(i, t, c) } def interval: Parser[Int] = opt("interval " ~> decimalNumber <~ ";") ^^ { _.getOrElse("100").toInt } def after: Parser[Int] = opt("after " ~> decimalNumber <~ ";") ^^ { _.getOrElse("0").toInt } def continuously: Parser[Boolean] = opt("continuously " ~> ident <~ ";") ^^ { _.getOrElse("false") match { case "true" => true case "false" => false case _ => throw new IllegalArgumentException("Setting \\"continuously\\" can only be \\"true\\" or \\"false\\"!") } } def context: Parser[Context] = "context" ~ ident ~ "{" ~ rep1(activationRule) ~ contextContent ~ "}" ^^ { case "context" ~ n ~ "{" ~ a ~ c ~ "}" => Context.build(n, c, a) } def variableValue: Parser[String] = opt("=" ~> codeLine) ^^ { _.getOrElse("") } def variableDecl: Parser[VariableDecl] = ("var" | "val") ~ ident ~ ":" ~ ident ~ variableValue ^^ { case "var" ~ n ~ ":" ~ t ~ "" => EmptyVariableDecl(VariableDeclAccessType.modifiable, n, t) case "var" ~ n ~ ":" ~ t ~ v => InitVariableDecl(VariableDeclAccessType.modifiable, n, t, v) case "val" ~ n ~ ":" ~ t ~ "" => EmptyVariableDecl(VariableDeclAccessType.unmodifiable, n, t) case "val" ~ n ~ ":" ~ t ~ v => InitVariableDecl(VariableDeclAccessType.unmodifiable, n, t, v) } def variableDecls: Parser[List[VariableDecl]] = rep(variableDecl <~ ";") def optVariableDecls: Parser[List[VariableDecl]] = opt(variableDecls) ^^ { _.getOrElse(List[VariableDecl]()) } def optRoles: Parser[List[Role]] = opt(roles) ^^ { _.getOrElse(List[Role]()) } def optContexts: Parser[List[Context]] = opt(contexts) ^^ { _.getOrElse(List[Context]()) } def optConstraints: Parser[List[RoleConstraint]] = opt(constraints) ^^ { _.getOrElse(List[RoleConstraint]()) } def contextContent: Parser[List[ScalaObject]] = rep((variableDecl <~ ";") | (constraint <~ ";") | role | context) def behavior: Parser[Behavior] = "behavior " ~> codeBlock ^^ { Behavior(_) } def method: Parser[Operation] = ident ~ ident ~ "() " ~ codeBlock ^^ { case t ~ n ~ "() " ~ c => Operation(n, t, c) } def methods: Parser[List[Operation]] = rep(method) def optMethods: Parser[List[Operation]] = opt(methods) ^^ { _.getOrElse(List[Operation]()) } def role: Parser[Role] = opt(singleton) ~ "role" ~ ident ~ "{" ~ behavior ~ roleContent ~ "}" ^^ { case s ~ "role" ~ n ~ "{" ~ b ~ c ~ "}" => Role.build(s.getOrElse(false), n, b, c) } def singleton: Parser[Boolean] = "singleton" ^^^ true def roleContent: Parser[List[ScalaObject]] = rep((variableDecl <~ ";") | method) def roles: Parser[List[Role]] = rep(role) def constraint: Parser[RoleConstraint] = ident ~ ("implies" | "prohibits" | "equals") ~ ident ^^ { case ra ~ "implies" ~ rb => ImplicationConstraint(ra, rb) case ra ~ "prohibits" ~ rb => ProhibitionConstraint(ra, rb) case ra ~ "equals" ~ rb => EquivalenceConstraint(ra, rb) } def constraints: Parser[List[RoleConstraint]] = rep(constraint <~ ";") /** * Will parse a String and return an instance of the CPS AST. * * @param p: a String representing a piece of CPSText code. * @return an instance of CPSProgram representing the concrete syntax tree for a given CPSText program. 
*/ def parse(p: String): CPSProgram = { parseAll(cpsProgram, p) match { case Success(r, _) => r.asInstanceOf[CPSProgram] case e => throw new Exception("Invalid CPSText source:\\n" + e.toString) } } }
max-leuthaeuser/CPSTextInterpreter
src/main/scala/de/qualitune/parser/CPSTextParser.scala
Scala
gpl-3.0
7,916
package net.sansa_stack.inference.spark.rules.plan

import org.apache.jena.graph.Node

/**
  * A join between two triple patterns.
  *
  * @author Lorenz Buehmann
  */
case class Join(tp1: org.apache.jena.graph.Triple, tp2: org.apache.jena.graph.Triple, joinVar: Node) {
  override def toString: String = tp1.toString + " JOIN " + tp2.toString + " ON " + joinVar
}
SANSA-Stack/SANSA-RDF
sansa-inference/sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/rules/plan/Join.scala
Scala
apache-2.0
365
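A small sketch of what the Join case class above renders; the triple patterns are built with Jena's NodeFactory and the URIs/variable names are arbitrary examples.

// Illustrative sketch: constructing a Join between two triple patterns with Jena.
import org.apache.jena.graph.{NodeFactory, Triple}
import net.sansa_stack.inference.spark.rules.plan.Join

object JoinExample extends App {
  val s = NodeFactory.createVariable("s")
  val o = NodeFactory.createVariable("o")
  val tp1 = Triple.create(s, NodeFactory.createURI("http://example.org/p"), o)
  val tp2 = Triple.create(o, NodeFactory.createURI("http://example.org/q"), NodeFactory.createVariable("x"))

  // The two patterns share ?o, so that is the join variable
  println(Join(tp1, tp2, o)) // "... JOIN ... ON ?o"
}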
/*
 * MUSIT is a museum database to archive natural and cultural history data.
 * Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License,
 * or any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

package models.storage.event

import play.api.data.validation.ValidationError
import play.api.libs.json._

case class EventType(name: String) extends AnyVal {

  def registeredEventId: EventTypeId =
    EventTypeRegistry.withNameInsensitive(name).id

}

object EventType {

  def fromEventTypeId(id: EventTypeId): EventType =
    EventType(EventTypeRegistry.unsafeFromId(id).name)

  def fromInt(i: Int): EventType = fromEventTypeId(EventTypeId(i))

  implicit val reads: Reads[EventType] =
    __.read[String]
      .filter(ValidationError("Unsupported event type")) { et =>
        EventTypeRegistry.withNameInsensitiveOption(et).isDefined
      }
      .map(EventType.apply)

  implicit val writes: Writes[EventType] = Writes { et =>
    JsString(et.name)
  }

}
kpmeen/musit
service_storagefacility/app/models/storage/event/EventType.scala
Scala
gpl-2.0
1,630
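A hedged sketch of how the EventType JSON handlers above are typically exercised; the event-type name "MoveObject" is an assumption about what the service's EventTypeRegistry contains, not something taken from this file.

// Illustrative sketch: round-tripping EventType through its implicit Reads/Writes.
// "MoveObject" is an assumed registry entry; any name known to EventTypeRegistry works.
import models.storage.event.EventType
import play.api.libs.json._

object EventTypeJsonDemo extends App {
  val json = JsString("MoveObject")

  json.validate[EventType] match {
    case JsSuccess(et, _) => println(s"Parsed event type: ${et.name}")
    case JsError(errors)  => println(s"Rejected: $errors") // unknown names fail the filter
  }

  println(Json.toJson(EventType("MoveObject"))) // "MoveObject"
}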
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import scala.collection.JavaConverters._ import org.apache.spark.sql.test.SharedSQLContext class DataFrameNaFunctionsSuite extends QueryTest with SharedSQLContext { import testImplicits._ def createDF(): DataFrame = { Seq[(String, java.lang.Integer, java.lang.Double)]( ("Bob", 16, 176.5), ("Alice", null, 164.3), ("David", 60, null), ("Nina", 25, Double.NaN), ("Amy", null, null), (null, null, null) ).toDF("name", "age", "height") } test("drop") { val input = createDF() val rows = input.collect() checkAnswer( input.na.drop("name" :: Nil).select("name"), Row("Bob") :: Row("Alice") :: Row("David") :: Row("Nina") :: Row("Amy") :: Nil) checkAnswer( input.na.drop("age" :: Nil).select("name"), Row("Bob") :: Row("David") :: Row("Nina") :: Nil) checkAnswer( input.na.drop("age" :: "height" :: Nil), rows(0) :: Nil) checkAnswer( input.na.drop(), rows(0)) // dropna on an a dataframe with no column should return an empty data frame. val empty = input.sparkSession.emptyDataFrame.select() assert(empty.na.drop().count() === 0L) // Make sure the columns are properly named. assert(input.na.drop().columns.toSeq === input.columns.toSeq) } test("drop with how") { val input = createDF() val rows = input.collect() checkAnswer( input.na.drop("all").select("name"), Row("Bob") :: Row("Alice") :: Row("David") :: Row("Nina") :: Row("Amy") :: Nil) checkAnswer( input.na.drop("any"), rows(0) :: Nil) checkAnswer( input.na.drop("any", Seq("age", "height")), rows(0) :: Nil) checkAnswer( input.na.drop("all", Seq("age", "height")).select("name"), Row("Bob") :: Row("Alice") :: Row("David") :: Row("Nina") :: Nil) } test("drop with threshold") { val input = createDF() val rows = input.collect() checkAnswer( input.na.drop(2, Seq("age", "height")), rows(0) :: Nil) checkAnswer( input.na.drop(3, Seq("name", "age", "height")), rows(0)) // Make sure the columns are properly named. assert(input.na.drop(2, Seq("age", "height")).columns.toSeq === input.columns.toSeq) } test("fill") { val input = createDF() val fillNumeric = input.na.fill(50.6) checkAnswer( fillNumeric, Row("Bob", 16, 176.5) :: Row("Alice", 50, 164.3) :: Row("David", 60, 50.6) :: Row("Nina", 25, 50.6) :: Row("Amy", 50, 50.6) :: Row(null, 50, 50.6) :: Nil) // Make sure the columns are properly named. 
assert(fillNumeric.columns.toSeq === input.columns.toSeq) // string checkAnswer( input.na.fill("unknown").select("name"), Row("Bob") :: Row("Alice") :: Row("David") :: Row("Nina") :: Row("Amy") :: Row("unknown") :: Nil) assert(input.na.fill("unknown").columns.toSeq === input.columns.toSeq) // fill double with subset columns checkAnswer( input.na.fill(50.6, "age" :: Nil).select("name", "age"), Row("Bob", 16) :: Row("Alice", 50) :: Row("David", 60) :: Row("Nina", 25) :: Row("Amy", 50) :: Row(null, 50) :: Nil) // fill string with subset columns checkAnswer( Seq[(String, String)]((null, null)).toDF("col1", "col2").na.fill("test", "col1" :: Nil), Row("test", null)) } test("fill with map") { val df = Seq[(String, String, java.lang.Integer, java.lang.Long, java.lang.Float, java.lang.Double, java.lang.Boolean)]( (null, null, null, null, null, null, null)) .toDF("stringFieldA", "stringFieldB", "integerField", "longField", "floatField", "doubleField", "booleanField") val fillMap = Map( "stringFieldA" -> "test", "integerField" -> 1, "longField" -> 2L, "floatField" -> 3.3f, "doubleField" -> 4.4d, "booleanField" -> false) val expectedRow = Row("test", null, 1, 2L, 3.3f, 4.4d, false) checkAnswer(df.na.fill(fillMap), expectedRow) checkAnswer(df.na.fill(fillMap.asJava), expectedRow) // Test Java version // Ensure replacement values are cast to the column data type. checkAnswer(df.na.fill(Map( "integerField" -> 1d, "longField" -> 2d, "floatField" -> 3d, "doubleField" -> 4d)), Row(null, null, 1, 2L, 3f, 4d, null)) // Ensure column types do not change. Columns that have null values replaced // will no longer be flagged as nullable, so do not compare schemas directly. assert(df.na.fill(fillMap).schema.fields.map(_.dataType) === df.schema.fields.map(_.dataType)) } test("replace") { val input = createDF() // Replace two numeric columns: age and height val out = input.na.replace(Seq("age", "height"), Map( 16 -> 61, 60 -> 6, 164.3 -> 461.3 // Alice is really tall )).collect() assert(out(0) === Row("Bob", 61, 176.5)) assert(out(1) === Row("Alice", null, 461.3)) assert(out(2) === Row("David", 6, null)) assert(out(3).get(2).asInstanceOf[Double].isNaN) assert(out(4) === Row("Amy", null, null)) assert(out(5) === Row(null, null, null)) // Replace only the age column val out1 = input.na.replace("age", Map( 16 -> 61, 60 -> 6, 164.3 -> 461.3 // Alice is really tall )).collect() assert(out1(0) === Row("Bob", 61, 176.5)) assert(out1(1) === Row("Alice", null, 164.3)) assert(out1(2) === Row("David", 6, null)) assert(out1(3).get(2).asInstanceOf[Double].isNaN) assert(out1(4) === Row("Amy", null, null)) assert(out1(5) === Row(null, null, null)) } }
javalovelinux/SparkGroovyScript
sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala
Scala
apache-2.0
6,502
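The suite above exercises Spark's standard DataFrameNaFunctions API; the sketch below runs the same drop/fill/replace calls outside the test harness against a local SparkSession with made-up rows.

// Illustrative sketch: the na.drop / na.fill / na.replace calls covered by the suite above.
import org.apache.spark.sql.SparkSession

object NaFunctionsDemo extends App {
  val spark = SparkSession.builder().master("local[*]").appName("na-demo").getOrCreate()
  import spark.implicits._

  val df = Seq[(String, java.lang.Integer, java.lang.Double)](
    ("Bob", 16, 176.5),
    ("Alice", null, 164.3),
    (null, null, null)
  ).toDF("name", "age", "height")

  df.na.drop().show()                                      // keeps only fully populated rows
  df.na.fill(Map("age" -> 50, "name" -> "unknown")).show() // per-column replacement values
  df.na.replace("age", Map(16 -> 61)).show()               // value substitution in one column

  spark.stop()
}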
package lila.game import chess.variant.{ Crazyhouse, Variant } import chess.{ CheckCount, Color, Clock, White, Black, Status, Mode, UnmovedRooks, History => ChessHistory, Game => ChessGame } import chess.format.FEN import org.joda.time.DateTime import reactivemongo.api.bson._ import scala.util.{ Success, Try } import lila.db.BSON import lila.db.dsl._ object BSONHandlers { import lila.db.ByteArray.ByteArrayBSONHandler implicit private[game] val checkCountWriter = new BSONWriter[CheckCount] { def writeTry(cc: CheckCount) = Success(BSONArray(cc.white, cc.black)) } implicit val StatusBSONHandler = tryHandler[Status]( { case BSONInteger(v) => Status(v) toTry s"No such status: $v" }, x => BSONInteger(x.id) ) implicit private[game] val unmovedRooksHandler = tryHandler[UnmovedRooks]( { case bin: BSONBinary => ByteArrayBSONHandler.readTry(bin) map BinaryFormat.unmovedRooks.read }, x => ByteArrayBSONHandler.writeTry(BinaryFormat.unmovedRooks write x).get ) implicit private[game] val crazyhouseDataBSONHandler = new BSON[Crazyhouse.Data] { import Crazyhouse._ def reads(r: BSON.Reader) = Crazyhouse.Data( pockets = { val (white, black) = { r.str("p").view.flatMap(chess.Piece.fromChar).to(List) }.partition(_ is chess.White) Pockets( white = Pocket(white.map(_.role)), black = Pocket(black.map(_.role)) ) }, promoted = r.str("t").view.flatMap(chess.Pos.piotr).to(Set) ) def writes(w: BSON.Writer, o: Crazyhouse.Data) = BSONDocument( "p" -> { o.pockets.white.roles.map(_.forsythUpper).mkString + o.pockets.black.roles.map(_.forsyth).mkString }, "t" -> o.promoted.map(_.piotr).mkString ) } implicit private[game] val gameDrawOffersHandler = tryHandler[GameDrawOffers]( { case arr: BSONArray => Success(arr.values.foldLeft(GameDrawOffers.empty) { case (offers, BSONInteger(p)) => if (p > 0) offers.copy(white = offers.white incl p) else offers.copy(black = offers.black incl -p) case (offers, _) => offers }) }, offers => BSONArray((offers.white ++ offers.black.map(-_)).view.map(BSONInteger.apply).toIndexedSeq) ) import Player.playerBSONHandler private val emptyPlayerBuilder = playerBSONHandler.read($empty) implicit val gameBSONHandler: BSON[Game] = new BSON[Game] { import Game.{ BSONFields => F } import PgnImport.pgnImportBSONHandler def reads(r: BSON.Reader): Game = { lila.mon.game.fetch.increment() val light = lightGameBSONHandler.readsWithPlayerIds(r, r str F.playerIds) val startedAtTurn = r intD F.startedAtTurn val plies = r int F.turns atMost Game.maxPlies // unlimited can cause StackOverflowError val turnColor = Color.fromPly(plies) val createdAt = r date F.createdAt val playedPlies = plies - startedAtTurn val gameVariant = Variant(r intD F.variant) | chess.variant.Standard val decoded = r.bytesO(F.huffmanPgn).map { PgnStorage.Huffman.decode(_, playedPlies) } | { val clm = r.get[CastleLastMove](F.castleLastMove) val pgnMoves = PgnStorage.OldBin.decode(r bytesD F.oldPgn, playedPlies) val halfMoveClock = pgnMoves.reverse .indexWhere(san => san.contains("x") || san.headOption.exists(_.isLower)) .some .filter(0 <=) PgnStorage.Decoded( pgnMoves = pgnMoves, pieces = BinaryFormat.piece.read(r bytes F.binaryPieces, gameVariant), positionHashes = r.getO[chess.PositionHash](F.positionHashes) | Array.empty, unmovedRooks = r.getO[UnmovedRooks](F.unmovedRooks) | UnmovedRooks.default, lastMove = clm.lastMove, castles = clm.castles, halfMoveClock = halfMoveClock orElse r.getO[FEN](F.initialFen).flatMap(_.halfMove) getOrElse playedPlies ) } val chessGame = ChessGame( situation = chess.Situation( chess.Board( pieces = 
decoded.pieces, history = ChessHistory( lastMove = decoded.lastMove, castles = decoded.castles, halfMoveClock = decoded.halfMoveClock, positionHashes = decoded.positionHashes, unmovedRooks = decoded.unmovedRooks, checkCount = if (gameVariant.threeCheck) { val counts = r.intsD(F.checkCount) CheckCount(~counts.headOption, ~counts.lastOption) } else Game.emptyCheckCount ), variant = gameVariant, crazyData = gameVariant.crazyhouse option r.get[Crazyhouse.Data](F.crazyData) ), color = turnColor ), pgnMoves = decoded.pgnMoves, clock = r.getO[Color => Clock](F.clock) { clockBSONReader(createdAt, light.whitePlayer.berserk, light.blackPlayer.berserk) } map (_(turnColor)), turns = plies, startedAtTurn = startedAtTurn ) val whiteClockHistory = r bytesO F.whiteClockHistory val blackClockHistory = r bytesO F.blackClockHistory Game( id = light.id, whitePlayer = light.whitePlayer, blackPlayer = light.blackPlayer, chess = chessGame, loadClockHistory = clk => for { bw <- whiteClockHistory bb <- blackClockHistory history <- BinaryFormat.clockHistory .read(clk.limit, bw, bb, (light.status == Status.Outoftime).option(turnColor)) _ = lila.mon.game.loadClockHistory.increment() } yield history, status = light.status, daysPerTurn = r intO F.daysPerTurn, binaryMoveTimes = r bytesO F.moveTimes, mode = Mode(r boolD F.rated), bookmarks = r intD F.bookmarks, createdAt = createdAt, movedAt = r.dateD(F.movedAt, createdAt), metadata = Metadata( source = r intO F.source flatMap Source.apply, pgnImport = r.getO[PgnImport](F.pgnImport)(PgnImport.pgnImportBSONHandler), tournamentId = r strO F.tournamentId, swissId = r strO F.swissId, simulId = r strO F.simulId, analysed = r boolD F.analysed, drawOffers = r.getD(F.drawOffers, GameDrawOffers.empty) ) ) } def writes(w: BSON.Writer, o: Game) = BSONDocument( F.id -> o.id, F.playerIds -> (o.whitePlayer.id + o.blackPlayer.id), F.playerUids -> w.strListO(List(~o.whitePlayer.userId, ~o.blackPlayer.userId)), F.whitePlayer -> w.docO( playerBSONHandler write ((_: Color) => (_: Player.ID) => (_: Player.UserId) => (_: Player.Win) => o.whitePlayer ) ), F.blackPlayer -> w.docO( playerBSONHandler write ((_: Color) => (_: Player.ID) => (_: Player.UserId) => (_: Player.Win) => o.blackPlayer ) ), F.status -> o.status, F.turns -> o.chess.turns, F.startedAtTurn -> w.intO(o.chess.startedAtTurn), F.clock -> (o.chess.clock flatMap { c => clockBSONWrite(o.createdAt, c).toOption }), F.daysPerTurn -> o.daysPerTurn, F.moveTimes -> o.binaryMoveTimes, F.whiteClockHistory -> clockHistory(White, o.clockHistory, o.chess.clock, o.flagged), F.blackClockHistory -> clockHistory(Black, o.clockHistory, o.chess.clock, o.flagged), F.rated -> w.boolO(o.mode.rated), F.variant -> o.board.variant.exotic.option(w int o.board.variant.id), F.bookmarks -> w.intO(o.bookmarks), F.createdAt -> w.date(o.createdAt), F.movedAt -> w.date(o.movedAt), F.source -> o.metadata.source.map(_.id), F.pgnImport -> o.metadata.pgnImport, F.tournamentId -> o.metadata.tournamentId, F.swissId -> o.metadata.swissId, F.simulId -> o.metadata.simulId, F.analysed -> w.boolO(o.metadata.analysed) ) ++ { if (o.variant.standard) $doc(F.huffmanPgn -> PgnStorage.Huffman.encode(o.pgnMoves take Game.maxPlies)) else { val f = PgnStorage.OldBin $doc( F.oldPgn -> f.encode(o.pgnMoves take Game.maxPlies), F.binaryPieces -> BinaryFormat.piece.write(o.board.pieces), F.positionHashes -> o.history.positionHashes, F.unmovedRooks -> o.history.unmovedRooks, F.castleLastMove -> CastleLastMove.castleLastMoveBSONHandler .writeTry( CastleLastMove( castles = 
o.history.castles, lastMove = o.history.lastMove ) ) .toOption, F.checkCount -> o.history.checkCount.nonEmpty.option(o.history.checkCount), F.crazyData -> o.board.crazyData ) } } } implicit object lightGameBSONHandler extends lila.db.BSONReadOnly[LightGame] { import Game.{ BSONFields => F } import Player.playerBSONHandler def reads(r: BSON.Reader): LightGame = { lila.mon.game.fetchLight.increment() readsWithPlayerIds(r, "") } def readsWithPlayerIds(r: BSON.Reader, playerIds: String): LightGame = { val (whiteId, blackId) = playerIds splitAt 4 val winC = r boolO F.winnerColor map Color.fromWhite val uids = ~r.getO[List[lila.user.User.ID]](F.playerUids) val (whiteUid, blackUid) = (uids.headOption.filter(_.nonEmpty), uids.lift(1).filter(_.nonEmpty)) def makePlayer(field: String, color: Color, id: Player.ID, uid: Player.UserId): Player = { val builder = r.getO[Player.Builder](field)(playerBSONHandler) | emptyPlayerBuilder builder(color)(id)(uid)(winC map (_ == color)) } LightGame( id = r str F.id, whitePlayer = makePlayer(F.whitePlayer, White, whiteId, whiteUid), blackPlayer = makePlayer(F.blackPlayer, Black, blackId, blackUid), status = r.get[Status](F.status) ) } } private def clockHistory( color: Color, clockHistory: Option[ClockHistory], clock: Option[Clock], flagged: Option[Color] ) = for { clk <- clock history <- clockHistory times = history(color) } yield BinaryFormat.clockHistory.writeSide(clk.limit, times, flagged has color) private[game] def clockBSONReader(since: DateTime, whiteBerserk: Boolean, blackBerserk: Boolean) = new BSONReader[Color => Clock] { def readTry(bson: BSONValue): Try[Color => Clock] = bson match { case bin: BSONBinary => ByteArrayBSONHandler readTry bin map { cl => BinaryFormat.clock(since).read(cl, whiteBerserk, blackBerserk) } case b => lila.db.BSON.handlerBadType(b) } } private[game] def clockBSONWrite(since: DateTime, clock: Clock) = ByteArrayBSONHandler writeTry { BinaryFormat clock since write clock } }
luanlv/lila
modules/game/src/main/BSONHandlers.scala
Scala
mit
11,320
/*
 * Copyright (C) 2015 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.crossdata.daos.impl

import org.apache.spark.sql.crossdata.daos.DAOConstants._
import org.apache.spark.sql.crossdata.daos.{EphemeralQueriesMapDAO => EphQueriesMapDAO}

import scala.util.Try

class EphemeralQueriesMapDAO(opts: Map[String, String], subPath: Option[String] = None) extends EphQueriesMapDAO {

  val memoryMap = opts

  override lazy val config: Config = new DummyConfig(subPath)

  def prefix: String = Try(config.getString(PrefixStreamingCatalogsConfig) + "_") getOrElse ("")
}
hdominguez1989/Crossdata
core/src/main/scala/org/apache/spark/sql/crossdata/daos/impl/EphemeralQueriesMapDAO.scala
Scala
apache-2.0
1,142
package me.eax.examples.statsd.client

import scala.compat._
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global

package object utils {
  private val client = new MetricsClientImpl

  def recordTime[T](metricName: String)(f: => T): T = {
    val startTimeMs = Platform.currentTime
    val result = f
    val endTimeMs = Platform.currentTime
    client.synchronized {
      client.recordTime(metricName, endTimeMs - startTimeMs)
    }
    result
  }

  def recordTimeF[T](metricName: String)(f: => Future[T]): Future[T] = {
    val startTimeMs = Platform.currentTime
    val fResult = f
    // TODO: check if future is completed successfully
    fResult.onComplete { case _ =>
      val endTimeMs = Platform.currentTime
      client.synchronized {
        client.recordTime(metricName, endTimeMs - startTimeMs)
      }
    }
    fResult
  }
}
afiskon/scala-statsd-example
src/main/scala/me/eax/examples/statsd/client/utils/package.scala
Scala
mit
875
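A sketch of how the recordTime/recordTimeF wrappers above are meant to be called; the metric names and the work being timed are placeholders.

// Illustrative sketch: timing a block and a Future with the package object above.
import me.eax.examples.statsd.client.utils._
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object TimingDemo extends App {
  // Synchronous block: elapsed wall-clock time is reported under "demo.sync"
  val sum = recordTime("demo.sync") {
    (1 to 1000000).sum
  }

  // Future: the timer stops when the future completes (success or failure)
  val f = recordTimeF("demo.async") {
    Future { Thread.sleep(100); sum * 2 }
  }
  println(Await.result(f, 5.seconds))
}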
package org.skrushingiv.util

import org.joda.time.{ DateTime, Duration }
import scala.concurrent.duration.FiniteDuration

/**
 * This module provides a convenient syntax for creating Streams of DateTimes.
 *
 * For syntactical sugar, it allows the use of `scala.concurrent.duration` postfix operators
 * for durations as well as joda-time durations.
 *
 * Example:
 *
 *   val every5Days = Dates.from(DateTime.now, 5 days)
 *   val untilChristmas = Dates.between(DateTime.now, DateTime.now.withMonthOfYear(12).withDayOfMonth(31))
 *
 */
object Dates {

  /**
   * Creates a lazily evaluated infinite stream of datetimes separated by a step duration in milliseconds.
   *
   * @param step duration between dates in milliseconds; defaults to 86400000 milliseconds (1 day)
   */
  def from(start: DateTime, step: Long = 86400000L): Stream[DateTime] =
    start #:: from(start.plus(step), step)

  /**
   * Creates a lazily evaluated stream of datetimes separated by a step duration.
   *
   * This variation allows the use of Joda-Time Duration objects.
   */
  def from(start: DateTime, step: Duration): Stream[DateTime] =
    from(start, step.getMillis)

  /**
   * Creates a lazily evaluated stream of datetimes separated by a step duration.
   *
   * This variation allows the use of scala.concurrent.duration postfix operators such as `3 minutes` or `2 days`
   */
  def from(start: DateTime, step: FiniteDuration): Stream[DateTime] =
    from(start, step.toMillis)

  /**
   * Creates a lazily evaluated stream of datetimes between start and end dates (inclusive) separated by a step
   * duration in milliseconds.
   *
   * @param stepMillis defaults to 86400000 milliseconds, 1 day
   */
  def between(start: DateTime, end: DateTime, stepMillis: Long = 86400000L): Stream[DateTime] =
    from(start, stepMillis).takeWhile(!_.isAfter(end))

  /**
   * Creates a lazily evaluated stream of datetimes between start and end dates (inclusive) separated by a step duration.
   *
   * This variation allows the use of Joda-Time Duration objects.
   */
  def between(start: DateTime, end: DateTime, step: Duration): Stream[DateTime] =
    between(start, end, step.getMillis)

  /**
   * Creates a lazily evaluated stream of datetimes between start and end dates (inclusive) separated by a step duration.
   *
   * This variation allows the use of scala.concurrent.duration postfix operators such as `3 minutes` or `2 days`
   */
  def between(start: DateTime, end: DateTime, step: FiniteDuration): Stream[DateTime] =
    between(start, end, step.toMillis)
}
srushingiv/org.skrushingiv
src/main/scala/org/skrushingiv/util/Dates.scala
Scala
mit
2,559
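A runnable sketch of the Dates stream builders above; the start date and step sizes are arbitrary examples.

// Illustrative sketch: building lazy DateTime streams with the Dates helper above.
import org.joda.time.DateTime
import org.skrushingiv.util.Dates
import scala.concurrent.duration._

object DatesDemo extends App {
  val start = new DateTime(2015, 1, 1, 0, 0)

  // Infinite stream, one element per day; take() keeps it finite
  Dates.from(start).take(3).foreach(println)

  // Step expressed with scala.concurrent.duration syntax
  Dates.from(start, 12.hours).take(4).foreach(println)

  // Bounded, inclusive range
  val firstWeek = Dates.between(start, start.plusDays(6))
  println(firstWeek.size) // 7
}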
package io.buoyant.linkerd.protocol.h2

import com.fasterxml.jackson.annotation.JsonIgnore
import com.twitter.finagle.buoyant.Dst
import com.twitter.finagle.buoyant.h2._
import com.twitter.finagle.{Dtab, Path, Stack}
import com.twitter.util.Future
import io.buoyant.config.types.Port
import io.buoyant.k8s.istio.{ClusterCache, IstioIdentifierBase, RouteCache}
import io.buoyant.linkerd.IdentifierInitializer
import io.buoyant.linkerd.protocol.H2IdentifierConfig
import io.buoyant.linkerd.protocol.h2.ErrorReseter.H2ResponseException
import io.buoyant.router.RoutingFactory.{BaseDtab, DstPrefix, IdentifiedRequest, Identifier, RequestIdentification}
import istio.proxy.v1.config.HTTPRedirect

class IstioIdentifier(val pfx: Path, baseDtab: () => Dtab, val routeCache: RouteCache, val clusterCache: ClusterCache)
  extends Identifier[Request] with IstioIdentifierBase[Request] {

  override def apply(req: Request): Future[RequestIdentification[Request]] = {
    getIdentifiedPath(req).map { path =>
      val dst = Dst.Path(path, baseDtab(), Dtab.local)
      new IdentifiedRequest(dst, req)
    }
  }

  def redirectRequest(redir: HTTPRedirect, req: Request): Future[Nothing] = {
    val resp = Response(Status.Found, Stream.empty())
    resp.headers.set(Headers.Path, redir.`uri`.getOrElse(req.path))
    resp.headers.set(Headers.Authority, redir.`authority`.getOrElse(req.authority))
    Future.exception(H2ResponseException(resp))
  }

  def rewriteRequest(uri: String, authority: Option[String], req: Request): Unit = {
    req.headers.set(Headers.Path, uri)
    req.headers.set(Headers.Authority, authority.getOrElse(""))
  }

  def reqToMeta(req: Request): IstioRequestMeta =
    IstioRequestMeta(req.path, req.scheme, req.method.toString, req.authority, req.headers.get)
}

case class IstioIdentifierConfig(
  discoveryHost: Option[String],
  discoveryPort: Option[Port],
  apiserverHost: Option[String],
  apiserverPort: Option[Port]
) extends H2IdentifierConfig {

  @JsonIgnore
  override def newIdentifier(params: Stack.Params) = {
    import io.buoyant.k8s.istio._

    val DstPrefix(prefix) = params[DstPrefix]
    val BaseDtab(baseDtab) = params[BaseDtab]
    val host = apiserverHost.getOrElse(DefaultApiserverHost)
    val port = apiserverPort.map(_.port).getOrElse(DefaultApiserverPort)
    val routeCache = RouteCache.getManagerFor(host, port)
    val discoveryClient = DiscoveryClient(
      discoveryHost.getOrElse(DefaultDiscoveryHost),
      discoveryPort.map(_.port).getOrElse(DefaultDiscoveryPort)
    )
    val clusterCache = new ClusterCache(discoveryClient)
    new IstioIdentifier(prefix, baseDtab, routeCache, clusterCache)
  }
}

object IstioIdentifierConfig {
  val kind = "io.l5d.k8s.istio"
}

class IstioIdentifierInitializer extends IdentifierInitializer {
  val configClass = classOf[IstioIdentifierConfig]
  override val configId = IstioIdentifierConfig.kind
}

object IstioIdentifierInitializer extends IstioIdentifierInitializer
denverwilliams/linkerd
linkerd/protocol/h2/src/main/scala/io/buoyant/linkerd/protocol/h2/IstioIdentifier.scala
Scala
apache-2.0
2,967
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.k8s import org.apache.spark.internal.Logging import org.apache.spark.internal.config.ConfigBuilder import org.apache.spark.network.util.ByteUnit private[spark] object Config extends Logging { val KUBERNETES_NAMESPACE = ConfigBuilder("spark.kubernetes.namespace") .doc("The namespace that will be used for running the driver and executor pods. When using " + "spark-submit in cluster mode, this can also be passed to spark-submit via the " + "--kubernetes-namespace command line argument.") .stringConf .createWithDefault("default") val EXECUTOR_DOCKER_IMAGE = ConfigBuilder("spark.kubernetes.executor.docker.image") .doc("Docker image to use for the executors. Specify this using the standard Docker tag " + "format.") .stringConf .createOptional val DOCKER_IMAGE_PULL_POLICY = ConfigBuilder("spark.kubernetes.docker.image.pullPolicy") .doc("Kubernetes image pull policy. Valid values are Always, Never, and IfNotPresent.") .stringConf .checkValues(Set("Always", "Never", "IfNotPresent")) .createWithDefault("IfNotPresent") val APISERVER_AUTH_DRIVER_CONF_PREFIX = "spark.kubernetes.authenticate.driver" val APISERVER_AUTH_DRIVER_MOUNTED_CONF_PREFIX = "spark.kubernetes.authenticate.driver.mounted" val OAUTH_TOKEN_CONF_SUFFIX = "oauthToken" val OAUTH_TOKEN_FILE_CONF_SUFFIX = "oauthTokenFile" val CLIENT_KEY_FILE_CONF_SUFFIX = "clientKeyFile" val CLIENT_CERT_FILE_CONF_SUFFIX = "clientCertFile" val CA_CERT_FILE_CONF_SUFFIX = "caCertFile" val KUBERNETES_SERVICE_ACCOUNT_NAME = ConfigBuilder(s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.serviceAccountName") .doc("Service account that is used when running the driver pod. The driver pod uses " + "this service account when requesting executor pods from the API server. If specific " + "credentials are given for the driver pod to use, the driver will favor " + "using those credentials instead.") .stringConf .createOptional // Note that while we set a default for this when we start up the // scheduler, the specific default value is dynamically determined // based on the executor memory. val KUBERNETES_EXECUTOR_MEMORY_OVERHEAD = ConfigBuilder("spark.kubernetes.executor.memoryOverhead") .doc("The amount of off-heap memory (in megabytes) to be allocated per executor. This " + "is memory that accounts for things like VM overheads, interned strings, other native " + "overheads, etc. This tends to grow with the executor size. (typically 6-10%).") .bytesConf(ByteUnit.MiB) .createOptional val KUBERNETES_EXECUTOR_LABEL_PREFIX = "spark.kubernetes.executor.label." val KUBERNETES_EXECUTOR_ANNOTATION_PREFIX = "spark.kubernetes.executor.annotation." 
val KUBERNETES_DRIVER_POD_NAME = ConfigBuilder("spark.kubernetes.driver.pod.name") .doc("Name of the driver pod.") .stringConf .createOptional val KUBERNETES_EXECUTOR_POD_NAME_PREFIX = ConfigBuilder("spark.kubernetes.executor.podNamePrefix") .doc("Prefix to use in front of the executor pod names.") .internal() .stringConf .createWithDefault("spark") val KUBERNETES_ALLOCATION_BATCH_SIZE = ConfigBuilder("spark.kubernetes.allocation.batch.size") .doc("Number of pods to launch at once in each round of executor allocation.") .intConf .checkValue(value => value > 0, "Allocation batch size should be a positive integer") .createWithDefault(5) val KUBERNETES_ALLOCATION_BATCH_DELAY = ConfigBuilder("spark.kubernetes.allocation.batch.delay") .doc("Number of seconds to wait between each round of executor allocation.") .longConf .checkValue(value => value > 0, "Allocation batch delay should be a positive integer") .createWithDefault(1) val KUBERNETES_EXECUTOR_LIMIT_CORES = ConfigBuilder("spark.kubernetes.executor.limit.cores") .doc("Specify the hard cpu limit for a single executor pod") .stringConf .createOptional val KUBERNETES_EXECUTOR_LOST_REASON_CHECK_MAX_ATTEMPTS = ConfigBuilder("spark.kubernetes.executor.lostCheck.maxAttempts") .doc("Maximum number of attempts allowed for checking the reason of an executor loss " + "before it is assumed that the executor failed.") .intConf .checkValue(value => value > 0, "Maximum attempts of checks of executor lost reason " + "must be a positive integer") .createWithDefault(10) val KUBERNETES_NODE_SELECTOR_PREFIX = "spark.kubernetes.node.selector." }
ron8hu/spark
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
Scala
apache-2.0
5,456
package domain.model

/**
 * GraPHPizer source code analytics engine
 * Copyright (C) 2015 Martin Helmich <[email protected]>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

import org.neo4j.graphdb.Node
import persistence.Query
import persistence.NodeWrappers._

case class DataType(name: String, primitive: Boolean, collection: Boolean = false, inner: Option[DataType] = None) {

  def classLike: Option[ClassLike] = None

  def slug = {
    name.toLowerCase.replace("\\", "-").replace("<", "--").replace(">", "")
  }

  def query = new Query(
    ModelLabelTypes.Type,
    Map(
      "name" -> name,
      "primitive" -> Boolean.box(primitive),
      "collection" -> Boolean.box(collection)
    )
  )
}

object DataType {

  def fromNode(n: Node): DataType = new DataType(
    n.property[String]("name").getOrElse(""),
    n.property[Boolean]("primitive").getOrElse(false),
    n.property[Boolean]("collection").getOrElse(false),
    (n out ModelEdgeTypes.COLLECTION_OF).headOption.map { r => fromNode(r.end) }
  ) {
    override def classLike: Option[ClassLike] =
      (n out ModelEdgeTypes.IS).headOption.map { r => ClassLike.fromNode(r.end) }
  }
}
martin-helmich/graphpizer-server
app/domain/model/DataType.scala
Scala
gpl-3.0
1,772
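A small sketch of the slug behaviour of the DataType model above, using hand-built instances rather than Neo4j nodes; the type names are invented PHP-style examples.

// Illustrative sketch: slugs produced by the DataType model above, without touching Neo4j.
import domain.model.DataType

object DataTypeSlugDemo extends App {
  println(DataType("string", primitive = true).slug)                              // "string"
  println(DataType("Foo\\Bar\\Baz", primitive = false).slug)                      // "foo-bar-baz"
  println(DataType("array<Foo\\Bar>", primitive = false, collection = true).slug) // "array--foo-bar"
}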
/*
 * Anthony Anderson (Illyohs)
 * Copyright (c) 2017.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation version 2.1
 * of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

package io.github.illyohs.scorg

import java.lang.reflect.{Field, Method}

import net.minecraftforge.fml.common._
import net.minecraftforge.fml.relauncher.Side

import org.apache.logging.log4j.{Level, LogManager}

/**
  * Created by anthony on 6/18/17.
  */
class ScalaAdapter extends ILanguageAdapter {

  private val log = LogManager.getLogger("ScalaAdapter")

  override def supportsStatics(): Boolean = false

  override def setProxy(target: Field, proxyTarget: Class[_], proxy: AnyRef): Unit = {
    var prox = proxyTarget
    try {
      if (!prox.getName.endsWith("$")) {
        prox = Class.forName(proxyTarget.getName + "$", true, proxyTarget.getClassLoader)
      }
    } catch {
      case e: ClassNotFoundException =>
        // Format placeholders fixed to %s.%s so log4j's printf can render them
        log.printf(Level.INFO, "An error occurred trying to load a proxy into %s.%s. Did you declare your mod as 'class' instead of 'object'?", proxyTarget.getSimpleName, target.getName)
        return
    }

    val targetInstance: AnyRef = prox.getField("MODULE$").get(null)
    val setterName: String = target.getName + "_$eq"

    for (setter <- prox.getMethods) {
      val setterParameters: Array[Class[_]] = setter.getParameterTypes
      if (setterName == setter.getName && setterParameters.length == 1 && setterParameters(0).isAssignableFrom(proxy.getClass)) {
        setter.invoke(targetInstance, proxy)
        return
      }
    }

    log.printf(Level.ERROR, "Failed loading proxy into %s.%s, could not find setter function. Did you declare the field with 'val' instead of 'var'?", proxyTarget.getSimpleName, target.getName)
    throw new LoaderException(String.format("Failed loading proxy into %s.%s, could not find setter function. Did you declare the field with 'val' instead of 'var'?", proxyTarget.getSimpleName, target.getName))
  }

  override def getNewInstance(container: FMLModContainer, objectClass: Class[_], classLoader: ClassLoader, factoryMarkedAnnotation: Method): AnyRef = {
    val sObjectClass: Class[_] = Class.forName(objectClass.getName + "$", true, classLoader)
    sObjectClass.getField("MODULE$").get(null)
  }

  override def setInternalProxies(mod: ModContainer, side: Side, loader: ClassLoader): Unit = {
    val proxyTarget: Class[_] = mod.getMod.getClass
    if (proxyTarget.getName.endsWith("$")) {
      for (target <- proxyTarget.getDeclaredFields) {
        if (target.getAnnotation(classOf[SidedProxy]) != null) {
          val targetType: String = if (side.isClient) {
            target.getAnnotation(classOf[SidedProxy]).clientSide()
          } else {
            target.getAnnotation(classOf[SidedProxy]).serverSide()
          }
          try {
            val proxy = Class.forName(targetType, true, loader).newInstance()
            if (!target.getType.isAssignableFrom(proxy.getClass)) {
              log.printf(Level.ERROR, "Attempted to load a proxy type %s into %s.%s, but the types don't match", targetType, proxyTarget.getSimpleName, target.getName)
              throw new LoaderException(String.format("Attempted to load a proxy type %s into %s.%s, but the types don't match", targetType, proxyTarget.getSimpleName, target.getName))
            }
            setProxy(target, proxyTarget, proxy.asInstanceOf[AnyRef])
          } catch {
            case e: LoaderException =>
              log.printf(Level.TRACE, "An error occurred trying to load a proxy into %s.%s", proxyTarget.getSimpleName, target.getName)
              throw new LoaderException(e)
          }
        }
      }
    } else {
      log.trace("Mod does not appear to be a singleton.")
    }
  }
}
Illyohs/Scorg
src/main/scala/io/github/illyohs/scorg/ScalaAdapter.scala
Scala
lgpl-2.1
4,374
package de.fuberlin.wiwiss.silk.plugins.transformer.linguistic

import de.fuberlin.wiwiss.silk.linkagerule.input.Transformer
import de.fuberlin.wiwiss.silk.runtime.plugin.Plugin
import java.net.{URLEncoder, HttpURLConnection, URL}
import xml.Elem
import collection.mutable.{ArrayBuffer, HashSet, Set => MSet}
import java.lang.ConditionalSpecialCasing
import de.fuberlin.wiwiss.silk.plugins.distance.tokenbased.CosineDistanceMetric

/**
 * Created by IntelliJ IDEA.
 * User: andreas
 * Date: 5/16/12
 * Time: 12:22 PM
 * To change this template use File | Settings | File Templates.
 */
@Plugin(
  id = "spotlight",
  categories = Array("Linguistic"),
  label = "Spotlight",
  description = "Concatenates all values to a string and gets a weighted entity vector from the Spotlight service."
)
class SpotlightTextVectorTransformer extends Transformer {

  def apply(values: Seq[Set[String]]): Set[String] = {
    val stringSet = values.reduce(_ union _)
    if (stringSet.size == 0)
      return Set[String]()
    val query = if (stringSet.size > 1) stringSet.reduceLeft(_ + " " + _) else stringSet.toSet.head
    SpotlightClient.querySpotlight(query)
  }
}

object SpotlightClient {
  val baseURL = "http://160.45.137.71:2222/extract?text="

  def querySpotlight(query: String): Set[String] = {
    val url = new URL(baseURL + URLEncoder.encode(query, "UTF-8"))
    val conn = url.openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("GET")
    conn.setRequestProperty("Accept", "text/xml")
    conn.connect()

    val rc = conn.getResponseCode
    if (rc < 200 || rc >= 300) {
      System.err.println("Query execution: Received error code " + rc + " from server")
      System.err.println("Error message: " + conn.getResponseMessage + "\n\nFor query: \n")
      System.err.println(query + "\n")
    }

    val is = conn.getInputStream
    if (is == null)
      return Set[String]()
    val root = xml.XML.load(is)
    Set(createEntityString(root))
  }

  // Converts the elements to "resource simScore;resource simScore..." strings
  private def createEntityString(root: Elem): String = {
    val tempResources = new ArrayBuffer[(String, Double)]
    val sb = new StringBuilder
    for (resource <- root \ "Resources" \ "Resource")
      tempResources += Pair(resource.text, (resource \ "@similarityScore").text.toDouble)
    var first = true
    for ((resource, score) <- normalize(tempResources)) {
      if (!first) sb.append(";") else first = false
      sb.append(resource).append(" ").append(score.toString)
    }
    sb.toString
  }

  def normalize(vector: Seq[(String, Double)]): Seq[(String, Double)] = {
    var factor = 0.0
    for ((resource, weight) <- vector)
      factor += math.pow(weight, 2.0)
    factor = math.sqrt(factor)
    for ((resource, weight) <- vector) yield (resource, weight / factor)
  }
}
fusepoolP3/p3-silk
silk-core/src/main/scala/de/fuberlin/wiwiss/silk/plugins/transformer/linguistic/SpotlightTextVectorTransformer.scala
Scala
apache-2.0
2,873
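The one part of SpotlightTextVectorTransformer that can be exercised without the Spotlight service is the L2 normalisation applied to the similarity scores. A minimal, self-contained sketch of that step; the resource names and scores are illustrative only.

object NormalizeSketch extends App {
  // Same idea as SpotlightClient.normalize: divide every weight by the L2 norm of the vector.
  def normalize(vector: Seq[(String, Double)]): Seq[(String, Double)] = {
    val factor = math.sqrt(vector.map { case (_, w) => w * w }.sum)
    vector.map { case (resource, w) => (resource, w / factor) }
  }

  // Two fake Spotlight resources with raw similarity scores 3.0 and 4.0.
  println(normalize(Seq("dbpedia:Berlin" -> 3.0, "dbpedia:Germany" -> 4.0)))
  // expected: List((dbpedia:Berlin,0.6), (dbpedia:Germany,0.8))
}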
/* Copyright 2017-18, Emmanouil Antonios Platanios. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.platanios.tensorflow.api.ops.io.data import org.platanios.tensorflow.api.core.Shape import org.platanios.tensorflow.api.ops.{Op, Output, SparseOutput} import org.platanios.tensorflow.api.tensors.{SparseTensor, Tensor} import org.platanios.tensorflow.api.types.{DataType, INT64} /** Dataset that splits a sparse tensor into its rows. * * @param tensor Sparse tensor. * @param name Name for this dataset. * * @author Emmanouil Antonios Platanios */ case class SparseTensorSlicesDataset( tensor: SparseTensor, override val name: String = "SparseTensorSliceDataset" ) extends Dataset[ SparseTensor, SparseOutput, (DataType, DataType, DataType), (Shape, Shape, Shape)](name) { /** Creates a `RESOURCE` scalar tensor representing this dataset. This function adds ops to the current graph, that * create the dataset resource. */ override def createHandle(): Output = { Op.Builder(opType = "SparseTensorSliceDataset", name = name) .addInput(tensor.indices) .addInput(tensor.values) .addInput(tensor.denseShape) .build().outputs(0) } override def outputDataTypes: (DataType, DataType, DataType) = (INT64, tensor.dataType, INT64) override def outputShapes: (Shape, Shape, Shape) = { val indicesShape = tensor.indices.shape val denseShapeShape = tensor.denseShape.shape val rank = Shape(indicesShape(1) - 1).mergeWith(Shape(denseShapeShape(0) - 1))(0) (Shape(-1, rank), Shape(-1), Shape(rank)) } } /** Dataset that splits a sparse tensor into its rows. * * @param tensor Sparse tensor. * @param name Name for this dataset. * * @author Emmanouil Antonios Platanios */ case class SparseOutputSlicesDataset( tensor: SparseOutput, override val name: String = "SparseOutputSliceDataset" ) extends Dataset[ SparseTensor, SparseOutput, (DataType, DataType, DataType), (Shape, Shape, Shape)](name) { /** Creates a `RESOURCE` scalar tensor representing this dataset. This function adds ops to the current graph, that * create the dataset resource. */ override def createHandle(): Output = { Op.Builder(opType = "SparseTensorSliceDataset", name = name) .addInput(tensor.indices) .addInput(tensor.values) .addInput(tensor.denseShape) .build().outputs(0) } override def outputDataTypes: (DataType, DataType, DataType) = (INT64, tensor.dataType, INT64) override def outputShapes: (Shape, Shape, Shape) = { val indicesShape = tensor.indices.shape val denseShapeShape = tensor.denseShape.shape val rank = Shape(indicesShape(1) - 1).mergeWith(Shape(denseShapeShape(0) - 1))(0) (Shape(-1, rank), Shape(-1), Shape(rank)) } }
eaplatanios/tensorflow
tensorflow/scala/api/src/main/scala/org/platanios/tensorflow/api/ops/io/data/SparseTensorSlicesDataset.scala
Scala
apache-2.0
3,343
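The shape bookkeeping in outputShapes above can be illustrated without TensorFlow: slicing a rank-R sparse tensor along its first dimension yields sparse pieces of rank R-1, so the per-slice indices lose one column and the dense shape loses one entry. A small stand-alone sketch with made-up numbers:

object SparseSliceShapeSketch extends App {
  // A sparse tensor with dense shape [4, 10, 7] (rank 3) and N non-zero entries stores
  //   indices: [N, 3], values: [N], denseShape: [3].
  val denseShape = Seq(4L, 10L, 7L)
  val sliceRank  = denseShape.length - 1 // the first dimension is consumed by slicing

  // Shapes reported per slice, mirroring outputShapes: (Shape(-1, rank), Shape(-1), Shape(rank))
  println(s"indices: [-1, $sliceRank], values: [-1], denseShape: [$sliceRank]")
}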
/* ___ _ ___ _ _ *\\ ** / __| |/ (_) | | Your SKilL scala Binding ** ** \\__ \\ ' <| | | |__ generated: 01.02.2019 ** ** |___/_|\\_\\_|_|____| by: feldentm ** \\* */ package de.ust.skill.sir.api.internal import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.HashMap import scala.collection.mutable.HashSet import scala.collection.mutable.ListBuffer import scala.collection.mutable.WrappedArray import scala.reflect.Manifest import de.ust.skill.common.jvm.streams.InStream import de.ust.skill.common.scala.SkillID import de.ust.skill.common.scala.api.SkillObject import de.ust.skill.common.scala.api.TypeMissmatchError import de.ust.skill.common.scala.internal.BasePool import de.ust.skill.common.scala.internal.FieldDeclaration import de.ust.skill.common.scala.internal.SkillState import de.ust.skill.common.scala.internal.SingletonStoragePool import de.ust.skill.common.scala.internal.StoragePool import de.ust.skill.common.scala.internal.SubPool import de.ust.skill.common.scala.internal.fieldTypes._ import de.ust.skill.common.scala.internal.restrictions.FieldRestriction import _root_.de.ust.skill.sir.api._ final class RestrictionPool(poolIndex : Int) extends BasePool[_root_.de.ust.skill.sir.Restriction]( poolIndex, "restriction" ) { override def getInstanceClass: Class[_root_.de.ust.skill.sir.Restriction] = classOf[_root_.de.ust.skill.sir.Restriction] override def addField[T : Manifest](ID : Int, t : FieldType[T], name : String, restrictions : HashSet[FieldRestriction]) : FieldDeclaration[T, _root_.de.ust.skill.sir.Restriction] = { val f = (name match { case "arguments" ⇒ new F_Restriction_arguments(ID, this, t.asInstanceOf[FieldType[scala.collection.mutable.ArrayBuffer[java.lang.String]]]) case "name" ⇒ new F_Restriction_name(ID, this, t.asInstanceOf[FieldType[java.lang.String]]) case _ ⇒ return super.addField(ID, t, name, restrictions) }).asInstanceOf[FieldDeclaration[T, _root_.de.ust.skill.sir.Restriction]] //check type if (t != f.t) throw new TypeMissmatchError(t, f.t.toString, f.name, name) val rs = restrictions.iterator while(rs.hasNext) f.addRestriction(rs.next()) dataFields += f return f } override def ensureKnownFields(st : SkillState) { val state = st.asInstanceOf[SkillFile] // data fields val Clsarguments = classOf[F_Restriction_arguments] val Clsname = classOf[F_Restriction_name] val fields = HashSet[Class[_ <: FieldDeclaration[_, _root_.de.ust.skill.sir.Restriction]]](Clsarguments,Clsname) var dfi = dataFields.size while (dfi != 0) { dfi -= 1 fields.remove(dataFields(dfi).getClass) } if(fields.contains(Clsarguments)) dataFields += new F_Restriction_arguments(dataFields.size + 1, this, VariableLengthArray(state.String)) if(fields.contains(Clsname)) dataFields += new F_Restriction_name(dataFields.size + 1, this, state.String) // no auto fields val fs = (dataFields ++ autoFields).iterator while (fs.hasNext) fs.next().createKnownRestrictions } override def makeSubPool(name : String, poolIndex : Int) = new RestrictionSubPool(poolIndex, name, this) override def allocateData : Unit = data = new Array[_root_.de.ust.skill.sir.Restriction](cachedSize) override def reflectiveAllocateInstance: _root_.de.ust.skill.sir.Restriction = { val r = new _root_.de.ust.skill.sir.Restriction(-1) this.newObjects.append(r) r } override def allocateInstances { for (b ← blocks.par) { var i : SkillID = b.bpo val last = i + b.staticCount while (i < last) { data(i) = new _root_.de.ust.skill.sir.Restriction(i + 1) i += 1 } } } def make(arguments : 
scala.collection.mutable.ArrayBuffer[java.lang.String] = scala.collection.mutable.ArrayBuffer[java.lang.String](), name : java.lang.String = null) = { val r = new _root_.de.ust.skill.sir.Restriction(-1 - newObjects.size, arguments : scala.collection.mutable.ArrayBuffer[java.lang.String], name : java.lang.String) newObjects.append(r) r } } final class RestrictionSubPool(poolIndex : Int, name : String, superPool : StoragePool[_ >: _root_.de.ust.skill.sir.Restriction.UnknownSubType <: _root_.de.ust.skill.sir.Restriction, _root_.de.ust.skill.sir.Restriction]) extends SubPool[_root_.de.ust.skill.sir.Restriction.UnknownSubType, _root_.de.ust.skill.sir.Restriction]( poolIndex, name, superPool ) { override def getInstanceClass : Class[_root_.de.ust.skill.sir.Restriction.UnknownSubType] = classOf[_root_.de.ust.skill.sir.Restriction.UnknownSubType] override def makeSubPool(name : String, poolIndex : Int) = new RestrictionSubPool(poolIndex, name, this) override def ensureKnownFields(st : SkillState) {} override def allocateInstances { for (b ← blocks.par) { var i : SkillID = b.bpo val last = i + b.staticCount while (i < last) { data(i) = new _root_.de.ust.skill.sir.Restriction.UnknownSubType(i + 1, this) i += 1 } } } def reflectiveAllocateInstance : _root_.de.ust.skill.sir.Restriction.UnknownSubType = { val r = new _root_.de.ust.skill.sir.Restriction.UnknownSubType(-1, this) this.newObjects.append(r) r } }
skill-lang/skill
src/main/scala/de/ust/skill/sir/api/internal/PoolRestriction.scala
Scala
bsd-3-clause
5,818
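A hedged sketch of how the generated make helper above might be called. pool stands for a RestrictionPool obtained from an opened SkillFile; that setup is not part of this record, so everything around the make call is hypothetical.

import scala.collection.mutable.ArrayBuffer
import de.ust.skill.sir.api.internal.RestrictionPool

object RestrictionPoolSketch {
  // Hypothetical helper: allocate a new Restriction through the generated make method above;
  // make appends the fresh instance to the pool's newObjects buffer before returning it.
  def addRangeRestriction(pool: RestrictionPool): de.ust.skill.sir.Restriction =
    pool.make(arguments = ArrayBuffer("1", "10"), name = "range")
}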
/** * * Porter stemmer in Scala. The original paper is in * * Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14, * no. 3, pp 130-137, * * See also http://www.tartarus.org/~martin/PorterStemmer * * A few methods were borrowed from the existing Java port from the above page. * * This version is adapted from the original by Ken Faulkner. */ package opennlp.scalabha.lang.eng class PorterStemmer { // word to be stemmed. var b = "" // Character sets to test membership for val vowels = Set('a', 'e', 'i', 'o', 'u') val wxy = Set('w', 'x', 'y') // Just recode the existing stuff, then go through and refactor with some intelligence. def cons(i: Int): Boolean = { val ch = b(i) if (vowels(ch)) false else { if (ch == 'y') if (i == 0) true else !cons(i - 1) else true } } // Add via letter or entire word def add(ch: Char) = b += ch def add(word: String) = b = word /** * m() measures the number of consonant sequences between 0 and j. if c is * a consonant sequence and v a vowel sequence, and <..> indicates arbitrary * presence, * * <c><v> gives 0 * <c>vc<v> gives 1 * <c>vcvc<v> gives 2 * <c>vcvcvc<v> gives 3 * .... * * I think this can be recoded far more neatly. */ def calcM(s: String): Int = { val l = s.length var count = 0 var currentConst = false for (c <- 0 to l - 1) { if (cons(c)) { if (!currentConst && c != 0) { count += 1 } currentConst = true } else { currentConst = false } } count } // removing the suffix 's', does a vowel exist?' def vowelInStem(s: String): Boolean = { for (i <- 0 to b.length - 1 - s.length) { if (!cons(i)) { return true } } return false } /* doublec(j) is true <=> j,(j-1) contain a double consonant. */ def doublec(): Boolean = { val l = b.length - 1 if (l < 1) false else { if (b(l) != b(l - 1)) false else cons(l) } } /** * cvc(i) is true <=> i-2,i-1,i has the form consonant - vowel - consonant * and also if the second c is not w,x or y. this is used when trying to * restore an e at the end of a short word. e.g. * * cav(e), lov(e), hop(e), crim(e), but * snow, box, tray. * */ def cvc(s: String): Boolean = { val i = b.length - 1 - s.length if (i < 2 || !cons(i) || cons(i - 1) || !cons(i - 2)) false else !wxy(b(i)) } // returns true if it did the change. def replacer(orig: String, replace: String, checker: Int => Boolean): Boolean = { val l = b.length val origLength = orig.length if (b.endsWith(orig)) { val n = b.substring(0, l - origLength) val m = calcM(n) if (checker(m)) b = n + replace true } else { false } } // process the list of tuples to find which prefix matches the case. // checker is the conditional checker for m. def processSubList(l: List[(String, String)], checker: Int => Boolean): Boolean = { val iter = l.iterator var done = false while (!done && iter.hasNext) { val v = iter.next done = replacer(v._1, v._2, checker) } done } def step1() { val l = b.length var m = calcM(b) // step 1a val esses = List(("sses", "ss"), ("ies", "i"), ("ss", "ss"), ("s", "")) processSubList(esses, _ >= 0) // step 1b if (!(replacer("eed", "ee", _ > 0))) { if ((vowelInStem("ed") && replacer("ed", "", _ >= 0)) || (vowelInStem("ing") && replacer("ing", "", _ >= 0))) { val atebleize = List(("at", "ate"), ("bl", "ble"), ("iz", "ize")) if (!processSubList(atebleize, _ >= 0)) { // if this isn't done, then it gets more confusing. 
m = calcM(b) val last = b(b.length - 1) if (doublec() && !"lsz".contains(last)) { b = b.substring(0, b.length - 1) } else if (m == 1 && cvc("")) { b = b + "e" } } } } // step 1c (vowelInStem("y") && replacer("y", "i", _ >= 0)) } def step2 = { val suffixes = List(("ational", "ate"), ("tional", "tion"), ("enci", "ence"), ("anci", "ance"), ("izer", "ize"), ("bli", "ble"), ("alli", "al"), ("entli", "ent"), ("eli", "e"), ("ousli", "ous"), ("ization", "ize"), ("ation", "ate"), ("ator", "ate"), ("alism", "al"), ("iveness", "ive"), ("fulness", "ful"), ("ousness", "ous"), ("aliti", "al"), ("iviti", "ive"), ("biliti", "ble"), ("logi", "log")) processSubList(suffixes, _ > 0) } def step3 = { val suffixes = List(("icate", "ic"), ("ative", ""), ("alize", "al"), ("iciti", "ic"), ("ical", "ic"), ("ful", ""), ("ness", "")) processSubList(suffixes, _ > 0) } def step4 = { // first part. val suffixes = List(("al", ""), ("ance", ""), ("ence", ""), ("er", ""), ("ic", ""), ("able", ""), ("ible", ""), ("ant", ""), ("ement", ""), ("ment", ""), ("ent", "")) var res = processSubList(suffixes, _ > 1) // special part. if (!res) { if (b.length > 4) { if (b(b.length - 4) == 's' || b(b.length - 4) == 't') { res = replacer("ion", "", _ > 1) } } } // third part. if (!res) { val suffixes = List(("ou", ""), ("ism", ""), ("ate", ""), ("iti", ""), ("ous", ""), ("ive", ""), ("ize", "")) res = processSubList(suffixes, _ > 1) } } def step5a = { replacer("e", "", _ > 1) if (!cvc("e")) replacer("e", "", _ == 1) } def step5b = { val m = calcM(b) if (m > 1 && doublec() && b.endsWith("l")) b = b.substring(0, b.length - 1) } def apply(token: String) = { add(token) if (b.length > 2) { step1 step2 step3 step4 step5a step5b } b } } object PorterStemmerTest { def main(args: Array[String]) { val stemmer = new PorterStemmer args.foreach { line => println(stemmer(line.trim)) } } }
utcompling/Scalabha
src/main/scala/opennlp/scalabha/lang/eng/PorterStemmer.scala
Scala
apache-2.0
6,070
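A small driver for the stemmer above; the commented expectations follow the standard Porter examples, and the exact output is whatever this implementation produces.

object PorterStemmerDemo extends App {
  val stemmer = new opennlp.scalabha.lang.eng.PorterStemmer
  // Typical Porter behaviour: caresses -> caress, ponies -> poni, relational -> relate,
  // hopping -> hop, agreed -> agree.
  Seq("caresses", "ponies", "relational", "hopping", "agreed").foreach { word =>
    println(s"$word -> ${stemmer(word)}")
  }
}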
package space.thedocking.infinitu.integer import space.thedocking.infinitu.dimension.DimensionValue import org.junit.runner.RunWith import org.specs2.matcher.ShouldMatchers import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class IntegerUniverseSpec extends Specification with ShouldMatchers { "IntegerUniverse" should { "define an IntegerValue" in { val iv = IntegerValue() iv.value must be equalTo (0) val iv2 = IntegerValue(0) iv must be equalTo iv2 iv plus iv2 must be equalTo iv val iv3 = IntegerValue(1) iv3 plus iv3 must be equalTo IntegerValue(2) iv3 minus iv3 must be equalTo iv iv.next must be equalTo iv3 iv3.previous must be equalTo iv //why not? iv3 must be greaterThan iv iv3.asInstanceOf[DimensionValue[Integer]] must be greaterThan iv iv.asInstanceOf[DimensionValue[Integer]] must be lessThan iv3 } "define an IntegerIntervalDimension" in { val iid = IntegerIntervalDimension(name = "small test dimension", minValue = IntegerValue(), maxValue = IntegerValue(2)) iid must not beNull; iid.plus(IntegerValue(), IntegerValue(1)) must be equalTo IntegerValue(1) } } }
TheDockingSpace/InfinitU
core/shared/src/test/scala/space/thedocking/infinitu/integer/IntegerUniverseSpec.scala
Scala
lgpl-3.0
1,348
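The same arithmetic the spec above exercises, written as a plain script; it assumes the space.thedocking.infinitu.integer classes from this project are on the classpath.

import space.thedocking.infinitu.integer.IntegerValue

object IntegerValueSketch extends App {
  val zero = IntegerValue()
  val one  = IntegerValue(1)
  println((one plus one) == IntegerValue(2)) // true, as the spec asserts
  println((one minus one) == zero)           // true
  println(zero.next == one)                  // true
}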
/* * Copyright 2015 data Artisans GmbH, 2019 Ververica GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ververica.flinktraining.solutions.datastream_scala.windows import com.ververica.flinktraining.exercises.datastream_java.datatypes.TaxiRide import com.ververica.flinktraining.exercises.datastream_java.sources.TaxiRideSource import com.ververica.flinktraining.exercises.datastream_java.utils.ExerciseBase._ import com.ververica.flinktraining.exercises.datastream_java.utils.{ExerciseBase, GeoUtils} import org.apache.flink.api.common.functions.MapFunction import org.apache.flink.api.java.utils.ParameterTool import org.apache.flink.streaming.api.TimeCharacteristic import org.apache.flink.streaming.api.scala._ import org.apache.flink.streaming.api.windowing.time.Time import org.apache.flink.util.Collector /** * Scala reference implementation for the "Popular Places" exercise of the Flink training * (http://training.ververica.com). * * The task of the exercise is to identify every five minutes popular areas where many taxi rides * arrived or departed in the last 15 minutes. * * Parameters: * -input path-to-input-file * */ object PopularPlacesSolution { def main(args: Array[String]) { // read parameters val params = ParameterTool.fromArgs(args) val input = params.get("input", pathToRideData) val popThreshold = params.getInt("threshold", 20) val maxDelay = 60 // events are out of order by max 60 seconds val speed = 600 // events of 10 minutes are served in 1 second // set up streaming execution environment val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime) env.setParallelism(ExerciseBase.parallelism) // start the data generator val rides = env.addSource(rideSourceOrTest(new TaxiRideSource(input, maxDelay, speed))) // find n most popular spots val popularPlaces = rides // remove all rides which are not within NYC .filter { r => GeoUtils.isInNYC(r.startLon, r.startLat) && GeoUtils.isInNYC(r.endLon, r.endLat) } // match ride to grid cell and event type (start or end) .map(new GridCellMatcher) // partition by cell id and event type .keyBy( k => k ) // build sliding window .timeWindow(Time.minutes(15), Time.minutes(5)) // count events in window .apply{ (key: (Int, Boolean), window, vals, out: Collector[(Int, Long, Boolean, Int)]) => out.collect( (key._1, window.getEnd, key._2, vals.size) ) } // filter by popularity threshold .filter( c => { c._4 >= popThreshold } ) // map grid cell to coordinates .map(new GridToCoordinates) // print result on stdout printOrTest(popularPlaces) // execute the transformation pipeline env.execute("Popular Places") } /** * Map taxi ride to grid cell and event type. * Start records use departure location, end record use arrival location. 
*/ class GridCellMatcher extends MapFunction[TaxiRide, (Int, Boolean)] { def map(taxiRide: TaxiRide): (Int, Boolean) = { if (taxiRide.isStart) { // get grid cell id for start location val gridId: Int = GeoUtils.mapToGridCell(taxiRide.startLon, taxiRide.startLat) (gridId, true) } else { // get grid cell id for end location val gridId: Int = GeoUtils.mapToGridCell(taxiRide.endLon, taxiRide.endLat) (gridId, false) } } } /** * Maps the grid cell id back to longitude and latitude coordinates. */ class GridToCoordinates extends MapFunction[ (Int, Long, Boolean, Int), (Float, Float, Long, Boolean, Int)] { def map(cellCount: (Int, Long, Boolean, Int)): (Float, Float, Long, Boolean, Int) = { val longitude = GeoUtils.getGridCellCenterLon(cellCount._1) val latitude = GeoUtils.getGridCellCenterLat(cellCount._1) (longitude, latitude, cellCount._2, cellCount._3, cellCount._4) } } }
dataArtisans/flink-training-exercises
src/main/scala/com/ververica/flinktraining/solutions/datastream_scala/windows/PopularPlacesSolution.scala
Scala
apache-2.0
4,527
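The pipeline above relies on each ride being assigned to three overlapping 15-minute windows that slide every 5 minutes. A plain-Scala sketch of that window assignment, assuming window boundaries aligned to multiples of the slide (Flink's default); it has no Flink dependency.

object SlidingWindowSketch extends App {
  val sizeMs  = 15 * 60 * 1000L // window length used above
  val slideMs =  5 * 60 * 1000L // slide used above

  // End timestamps of all windows an event with timestamp t contributes to.
  def windowEnds(t: Long): Seq[Long] = {
    val firstEnd = (t / slideMs) * slideMs + slideMs // first window end strictly after t
    firstEnd to (t + sizeMs) by slideMs
  }

  println(windowEnds(0L).map(_ / 60000L).toList)              // List(5, 10, 15)  (minutes)
  println(windowEnds(7 * 60 * 1000L).map(_ / 60000L).toList)  // List(10, 15, 20) (minutes)
}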
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.command import java.io.File import java.net.URI import java.nio.file.FileSystems import java.util.Date import scala.collection.mutable.ArrayBuffer import scala.util.control.NonFatal import scala.util.Try import org.apache.hadoop.fs.Path import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.catalog.CatalogTableType._ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference} import org.apache.spark.sql.catalyst.util.quoteIdentifier import org.apache.spark.sql.execution.datasources.PartitioningUtils import org.apache.spark.sql.types._ import org.apache.spark.util.Utils /** * A command to create a MANAGED table with the same definition of the given existing table. * In the target table definition, the table comment is always empty but the column comments * are identical to the ones defined in the source table. * * The CatalogTable attributes copied from the source table are storage(inputFormat, outputFormat, * serde, compressed, properties), schema, provider, partitionColumnNames, bucketSpec. * * The syntax of using this command in SQL is: * {{{ * CREATE TABLE [IF NOT EXISTS] [db_name.]table_name * LIKE [other_db_name.]existing_table_name * }}} */ case class CreateTableLikeCommand( targetTable: TableIdentifier, sourceTable: TableIdentifier, ifNotExists: Boolean) extends RunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val sourceTableDesc = catalog.getTempViewOrPermanentTableMetadata(sourceTable) val newProvider = if (sourceTableDesc.tableType == CatalogTableType.VIEW) { Some(sparkSession.sessionState.conf.defaultDataSourceName) } else { sourceTableDesc.provider } val newTableDesc = CatalogTable( identifier = targetTable, tableType = CatalogTableType.MANAGED, // We are creating a new managed table, which should not have custom table location. storage = sourceTableDesc.storage.copy(locationUri = None), schema = sourceTableDesc.schema, provider = newProvider, partitionColumnNames = sourceTableDesc.partitionColumnNames, bucketSpec = sourceTableDesc.bucketSpec) catalog.createTable(newTableDesc, ifNotExists) Seq.empty[Row] } } // TODO: move the rest of the table commands from ddl.scala to this file /** * A command to create a table. * * Note: This is currently used only for creating Hive tables. * This is not intended for temporary tables. 
* * The syntax of using this command in SQL is: * {{{ * CREATE [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name * [(col1 data_type [COMMENT col_comment], ...)] * [COMMENT table_comment] * [PARTITIONED BY (col3 data_type [COMMENT col_comment], ...)] * [CLUSTERED BY (col1, ...) [SORTED BY (col1 [ASC|DESC], ...)] INTO num_buckets BUCKETS] * [SKEWED BY (col1, col2, ...) ON ((col_value, col_value, ...), ...) * [STORED AS DIRECTORIES] * [ROW FORMAT row_format] * [STORED AS file_format | STORED BY storage_handler_class [WITH SERDEPROPERTIES (...)]] * [LOCATION path] * [TBLPROPERTIES (property_name=property_value, ...)] * [AS select_statement]; * }}} */ case class CreateTableCommand(table: CatalogTable, ifNotExists: Boolean) extends RunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { sparkSession.sessionState.catalog.createTable(table, ifNotExists) Seq.empty[Row] } } /** * A command that renames a table/view. * * The syntax of this command is: * {{{ * ALTER TABLE table1 RENAME TO table2; * ALTER VIEW view1 RENAME TO view2; * }}} */ case class AlterTableRenameCommand( oldName: TableIdentifier, newName: TableIdentifier, isView: Boolean) extends RunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog // If this is a temp view, just rename the view. // Otherwise, if this is a real table, we also need to uncache and invalidate the table. if (catalog.isTemporaryTable(oldName)) { catalog.renameTable(oldName, newName) } else { val table = catalog.getTableMetadata(oldName) DDLUtils.verifyAlterTableType(catalog, table, isView) // If an exception is thrown here we can just assume the table is uncached; // this can happen with Hive tables when the underlying catalog is in-memory. val wasCached = Try(sparkSession.catalog.isCached(oldName.unquotedString)).getOrElse(false) if (wasCached) { try { sparkSession.catalog.uncacheTable(oldName.unquotedString) } catch { case NonFatal(e) => log.warn(e.toString, e) } } // Invalidate the table last, otherwise uncaching the table would load the logical plan // back into the hive metastore cache catalog.refreshTable(oldName) catalog.renameTable(oldName, newName) if (wasCached) { sparkSession.catalog.cacheTable(newName.unquotedString) } } Seq.empty[Row] } } /** * A command that loads data into a Hive table. 
* * The syntax of this command is: * {{{ * LOAD DATA [LOCAL] INPATH 'filepath' [OVERWRITE] INTO TABLE tablename * [PARTITION (partcol1=val1, partcol2=val2 ...)] * }}} */ case class LoadDataCommand( table: TableIdentifier, path: String, isLocal: Boolean, isOverwrite: Boolean, partition: Option[TablePartitionSpec]) extends RunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val targetTable = catalog.getTableMetadata(table) val tableIdentwithDB = targetTable.identifier.quotedString if (targetTable.tableType == CatalogTableType.VIEW) { throw new AnalysisException(s"Target table in LOAD DATA cannot be a view: $tableIdentwithDB") } if (DDLUtils.isDatasourceTable(targetTable)) { throw new AnalysisException( s"LOAD DATA is not supported for datasource tables: $tableIdentwithDB") } if (targetTable.partitionColumnNames.nonEmpty) { if (partition.isEmpty) { throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " + s"but no partition spec is provided") } if (targetTable.partitionColumnNames.size != partition.get.size) { throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " + s"but number of columns in provided partition spec (${partition.get.size}) " + s"do not match number of partitioned columns in table " + s"(s${targetTable.partitionColumnNames.size})") } partition.get.keys.foreach { colName => if (!targetTable.partitionColumnNames.contains(colName)) { throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " + s"but the specified partition spec refers to a column that is not partitioned: " + s"'$colName'") } } } else { if (partition.nonEmpty) { throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is not " + s"partitioned, but a partition spec was provided.") } } val loadPath = if (isLocal) { val uri = Utils.resolveURI(path) val filePath = uri.getPath() val exists = if (filePath.contains("*")) { val fileSystem = FileSystems.getDefault val pathPattern = fileSystem.getPath(filePath) val dir = pathPattern.getParent.toString if (dir.contains("*")) { throw new AnalysisException( s"LOAD DATA input path allows only filename wildcard: $path") } val files = new File(dir).listFiles() if (files == null) { false } else { val matcher = fileSystem.getPathMatcher("glob:" + pathPattern.toAbsolutePath) files.exists(f => matcher.matches(fileSystem.getPath(f.getAbsolutePath))) } } else { new File(filePath).exists() } if (!exists) { throw new AnalysisException(s"LOAD DATA input path does not exist: $path") } uri } else { val uri = new URI(path) if (uri.getScheme() != null && uri.getAuthority() != null) { uri } else { // Follow Hive's behavior: // If no schema or authority is provided with non-local inpath, // we will use hadoop configuration "fs.default.name". 
val defaultFSConf = sparkSession.sessionState.newHadoopConf().get("fs.default.name") val defaultFS = if (defaultFSConf == null) { new URI("") } else { new URI(defaultFSConf) } val scheme = if (uri.getScheme() != null) { uri.getScheme() } else { defaultFS.getScheme() } val authority = if (uri.getAuthority() != null) { uri.getAuthority() } else { defaultFS.getAuthority() } if (scheme == null) { throw new AnalysisException( s"LOAD DATA: URI scheme is required for non-local input paths: '$path'") } // Follow Hive's behavior: // If LOCAL is not specified, and the path is relative, // then the path is interpreted relative to "/user/<username>" val uriPath = uri.getPath() val absolutePath = if (uriPath != null && uriPath.startsWith("/")) { uriPath } else { s"/user/${System.getProperty("user.name")}/$uriPath" } new URI(scheme, authority, absolutePath, uri.getQuery(), uri.getFragment()) } } if (partition.nonEmpty) { catalog.loadPartition( targetTable.identifier, loadPath.toString, partition.get, isOverwrite, holdDDLTime = false, inheritTableSpecs = true) } else { catalog.loadTable( targetTable.identifier, loadPath.toString, isOverwrite, holdDDLTime = false) } Seq.empty[Row] } } /** * A command to truncate table. * * The syntax of this command is: * {{{ * TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)] * }}} */ case class TruncateTableCommand( tableName: TableIdentifier, partitionSpec: Option[TablePartitionSpec]) extends RunnableCommand { override def run(spark: SparkSession): Seq[Row] = { val catalog = spark.sessionState.catalog val table = catalog.getTableMetadata(tableName) val tableIdentWithDB = table.identifier.quotedString if (table.tableType == CatalogTableType.EXTERNAL) { throw new AnalysisException( s"Operation not allowed: TRUNCATE TABLE on external tables: $tableIdentWithDB") } if (table.tableType == CatalogTableType.VIEW) { throw new AnalysisException( s"Operation not allowed: TRUNCATE TABLE on views: $tableIdentWithDB") } if (table.partitionColumnNames.isEmpty && partitionSpec.isDefined) { throw new AnalysisException( s"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported " + s"for tables that are not partitioned: $tableIdentWithDB") } if (partitionSpec.isDefined) { DDLUtils.verifyPartitionProviderIsHive(spark, table, "TRUNCATE TABLE ... PARTITION") } val partCols = table.partitionColumnNames val locations = if (partCols.isEmpty) { Seq(table.storage.locationUri) } else { val normalizedSpec = partitionSpec.map { spec => PartitioningUtils.normalizePartitionSpec( spec, partCols, table.identifier.quotedString, spark.sessionState.conf.resolver) } val partLocations = catalog.listPartitions(table.identifier, normalizedSpec).map(_.storage.locationUri) // Fail if the partition spec is fully specified (not partial) and the partition does not // exist. 
for (spec <- partitionSpec if partLocations.isEmpty && spec.size == partCols.length) { throw new NoSuchPartitionException(table.database, table.identifier.table, spec) } partLocations } val hadoopConf = spark.sessionState.newHadoopConf() locations.foreach { location => if (location.isDefined) { val path = new Path(location.get) try { val fs = path.getFileSystem(hadoopConf) fs.delete(path, true) fs.mkdirs(path) } catch { case NonFatal(e) => throw new AnalysisException( s"Failed to truncate table $tableIdentWithDB when removing data of the path: $path " + s"because of ${e.toString}") } } } // After deleting the data, invalidate the table to make sure we don't keep around a stale // file relation in the metastore cache. spark.sessionState.refreshTable(tableName.unquotedString) // Also try to drop the contents of the table from the columnar cache try { spark.sharedState.cacheManager.uncacheQuery(spark.table(table.identifier)) } catch { case NonFatal(e) => log.warn(s"Exception when attempting to uncache table $tableIdentWithDB", e) } Seq.empty[Row] } } /** * Command that looks like * {{{ * DESCRIBE [EXTENDED|FORMATTED] table_name partitionSpec?; * }}} */ case class DescribeTableCommand( table: TableIdentifier, partitionSpec: TablePartitionSpec, isExtended: Boolean, isFormatted: Boolean) extends RunnableCommand { override val output: Seq[Attribute] = Seq( // Column names are based on Hive. AttributeReference("col_name", StringType, nullable = false, new MetadataBuilder().putString("comment", "name of the column").build())(), AttributeReference("data_type", StringType, nullable = false, new MetadataBuilder().putString("comment", "data type of the column").build())(), AttributeReference("comment", StringType, nullable = true, new MetadataBuilder().putString("comment", "comment of the column").build())() ) override def run(sparkSession: SparkSession): Seq[Row] = { val result = new ArrayBuffer[Row] val catalog = sparkSession.sessionState.catalog if (catalog.isTemporaryTable(table)) { if (partitionSpec.nonEmpty) { throw new AnalysisException( s"DESC PARTITION is not allowed on a temporary view: ${table.identifier}") } describeSchema(catalog.lookupRelation(table).schema, result) } else { val metadata = catalog.getTableMetadata(table) if (metadata.schema.isEmpty) { // In older version(prior to 2.1) of Spark, the table schema can be empty and should be // inferred at runtime. We should still support it. 
describeSchema(catalog.lookupRelation(metadata.identifier).schema, result) } else { describeSchema(metadata.schema, result) } describePartitionInfo(metadata, result) if (partitionSpec.isEmpty) { if (isExtended) { describeExtendedTableInfo(metadata, result) } else if (isFormatted) { describeFormattedTableInfo(metadata, result) } } else { describeDetailedPartitionInfo(sparkSession, catalog, metadata, result) } } result } private def describePartitionInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { if (table.partitionColumnNames.nonEmpty) { append(buffer, "# Partition Information", "", "") append(buffer, s"# ${output.head.name}", output(1).name, output(2).name) describeSchema(table.partitionSchema, buffer) } } private def describeExtendedTableInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "# Detailed Table Information", table.toString, "") } private def describeFormattedTableInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "# Detailed Table Information", "", "") append(buffer, "Database:", table.database, "") append(buffer, "Owner:", table.owner, "") append(buffer, "Create Time:", new Date(table.createTime).toString, "") append(buffer, "Last Access Time:", new Date(table.lastAccessTime).toString, "") append(buffer, "Location:", table.storage.locationUri.getOrElse(""), "") append(buffer, "Table Type:", table.tableType.name, "") table.stats.foreach(s => append(buffer, "Statistics:", s.simpleString, "")) append(buffer, "Table Parameters:", "", "") table.properties.foreach { case (key, value) => append(buffer, s" $key", value, "") } describeStorageInfo(table, buffer) if (table.tableType == CatalogTableType.VIEW) describeViewInfo(table, buffer) if (DDLUtils.isDatasourceTable(table) && table.tracksPartitionsInCatalog) { append(buffer, "Partition Provider:", "Catalog", "") } } private def describeStorageInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "# Storage Information", "", "") metadata.storage.serde.foreach(serdeLib => append(buffer, "SerDe Library:", serdeLib, "")) metadata.storage.inputFormat.foreach(format => append(buffer, "InputFormat:", format, "")) metadata.storage.outputFormat.foreach(format => append(buffer, "OutputFormat:", format, "")) append(buffer, "Compressed:", if (metadata.storage.compressed) "Yes" else "No", "") describeBucketingInfo(metadata, buffer) append(buffer, "Storage Desc Parameters:", "", "") val maskedProperties = CatalogUtils.maskCredentials(metadata.storage.properties) maskedProperties.foreach { case (key, value) => append(buffer, s" $key", value, "") } } private def describeViewInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "# View Information", "", "") append(buffer, "View Original Text:", metadata.viewOriginalText.getOrElse(""), "") append(buffer, "View Expanded Text:", metadata.viewText.getOrElse(""), "") } private def describeBucketingInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = { metadata.bucketSpec match { case Some(BucketSpec(numBuckets, bucketColumnNames, sortColumnNames)) => append(buffer, "Num Buckets:", numBuckets.toString, "") append(buffer, "Bucket Columns:", bucketColumnNames.mkString("[", ", ", "]"), "") append(buffer, "Sort Columns:", sortColumnNames.mkString("[", ", ", "]"), "") case _ => } } private def describeDetailedPartitionInfo( spark: SparkSession, catalog: SessionCatalog, metadata: 
CatalogTable, result: ArrayBuffer[Row]): Unit = { if (metadata.tableType == CatalogTableType.VIEW) { throw new AnalysisException( s"DESC PARTITION is not allowed on a view: ${table.identifier}") } DDLUtils.verifyPartitionProviderIsHive(spark, metadata, "DESC PARTITION") val partition = catalog.getPartition(table, partitionSpec) if (isExtended) { describeExtendedDetailedPartitionInfo(table, metadata, partition, result) } else if (isFormatted) { describeFormattedDetailedPartitionInfo(table, metadata, partition, result) describeStorageInfo(metadata, result) } } private def describeExtendedDetailedPartitionInfo( tableIdentifier: TableIdentifier, table: CatalogTable, partition: CatalogTablePartition, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "Detailed Partition Information " + partition.toString, "", "") } private def describeFormattedDetailedPartitionInfo( tableIdentifier: TableIdentifier, table: CatalogTable, partition: CatalogTablePartition, buffer: ArrayBuffer[Row]): Unit = { append(buffer, "", "", "") append(buffer, "# Detailed Partition Information", "", "") append(buffer, "Partition Value:", s"[${partition.spec.values.mkString(", ")}]", "") append(buffer, "Database:", table.database, "") append(buffer, "Table:", tableIdentifier.table, "") append(buffer, "Location:", partition.storage.locationUri.getOrElse(""), "") append(buffer, "Partition Parameters:", "", "") partition.parameters.foreach { case (key, value) => append(buffer, s" $key", value, "") } } private def describeSchema(schema: StructType, buffer: ArrayBuffer[Row]): Unit = { schema.foreach { column => append(buffer, column.name, column.dataType.simpleString, column.getComment().orNull) } } private def append( buffer: ArrayBuffer[Row], column: String, dataType: String, comment: String): Unit = { buffer += Row(column, dataType, comment) } } /** * A command for users to get tables in the given database. * If a databaseName is not given, the current database will be used. * The syntax of using this command in SQL is: * {{{ * SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']; * }}} */ case class ShowTablesCommand( databaseName: Option[String], tableIdentifierPattern: Option[String]) extends RunnableCommand { // The result of SHOW TABLES has three columns: database, tableName and isTemporary. override val output: Seq[Attribute] = { AttributeReference("database", StringType, nullable = false)() :: AttributeReference("tableName", StringType, nullable = false)() :: AttributeReference("isTemporary", BooleanType, nullable = false)() :: Nil } override def run(sparkSession: SparkSession): Seq[Row] = { // Since we need to return a Seq of rows, we will call getTables directly // instead of calling tables in sparkSession. val catalog = sparkSession.sessionState.catalog val db = databaseName.getOrElse(catalog.getCurrentDatabase) val tables = tableIdentifierPattern.map(catalog.listTables(db, _)).getOrElse(catalog.listTables(db)) tables.map { tableIdent => val isTemp = catalog.isTemporaryTable(tableIdent) Row(tableIdent.database.getOrElse(""), tableIdent.table, isTemp) } } } /** * A command for users to list the properties for a table. If propertyKey is specified, the value * for the propertyKey is returned. If propertyKey is not specified, all the keys and their * corresponding values are returned. 
* The syntax of using this command in SQL is: * {{{ * SHOW TBLPROPERTIES table_name[('propertyKey')]; * }}} */ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Option[String]) extends RunnableCommand { override val output: Seq[Attribute] = { val schema = AttributeReference("value", StringType, nullable = false)() :: Nil propertyKey match { case None => AttributeReference("key", StringType, nullable = false)() :: schema case _ => schema } } override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog if (catalog.isTemporaryTable(table)) { Seq.empty[Row] } else { val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table) propertyKey match { case Some(p) => val propValue = catalogTable .properties .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p") Seq(Row(propValue)) case None => catalogTable.properties.map(p => Row(p._1, p._2)).toSeq } } } } /** * A command to list the column names for a table. This function creates a * [[ShowColumnsCommand]] logical plan. * * The syntax of using this command in SQL is: * {{{ * SHOW COLUMNS (FROM | IN) table_identifier [(FROM | IN) database]; * }}} */ case class ShowColumnsCommand( databaseName: Option[String], tableName: TableIdentifier) extends RunnableCommand { override val output: Seq[Attribute] = { AttributeReference("col_name", StringType, nullable = false)() :: Nil } override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val resolver = sparkSession.sessionState.conf.resolver val lookupTable = databaseName match { case None => tableName case Some(db) if tableName.database.exists(!resolver(_, db)) => throw new AnalysisException( s"SHOW COLUMNS with conflicting databases: '$db' != '${tableName.database.get}'") case Some(db) => TableIdentifier(tableName.identifier, Some(db)) } val table = catalog.getTempViewOrPermanentTableMetadata(lookupTable) table.schema.map { c => Row(c.name) } } } /** * A command to list the partition names of a table. If the partition spec is specified, * partitions that match the spec are returned. [[AnalysisException]] exception is thrown under * the following conditions: * * 1. If the command is called for a non partitioned table. * 2. If the partition spec refers to the columns that are not defined as partitioning columns. * * This function creates a [[ShowPartitionsCommand]] logical plan * * The syntax of using this command in SQL is: * {{{ * SHOW PARTITIONS [db_name.]table_name [PARTITION(partition_spec)] * }}} */ case class ShowPartitionsCommand( tableName: TableIdentifier, spec: Option[TablePartitionSpec]) extends RunnableCommand { override val output: Seq[Attribute] = { AttributeReference("partition", StringType, nullable = false)() :: Nil } override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) val tableIdentWithDB = table.identifier.quotedString /** * Validate and throws an [[AnalysisException]] exception under the following conditions: * 1. If the table is not partitioned. * 2. If it is a datasource table. * 3. If it is a view. 
*/ if (table.tableType == VIEW) { throw new AnalysisException(s"SHOW PARTITIONS is not allowed on a view: $tableIdentWithDB") } if (table.partitionColumnNames.isEmpty) { throw new AnalysisException( s"SHOW PARTITIONS is not allowed on a table that is not partitioned: $tableIdentWithDB") } DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "SHOW PARTITIONS") /** * Validate the partitioning spec by making sure all the referenced columns are * defined as partitioning columns in table definition. An AnalysisException exception is * thrown if the partitioning spec is invalid. */ if (spec.isDefined) { val badColumns = spec.get.keySet.filterNot(table.partitionColumnNames.contains) if (badColumns.nonEmpty) { val badCols = badColumns.mkString("[", ", ", "]") throw new AnalysisException( s"Non-partitioning column(s) $badCols are specified for SHOW PARTITIONS") } } val partNames = catalog.listPartitionNames(tableName, spec) partNames.map(Row(_)) } } case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableCommand { override val output: Seq[Attribute] = Seq( AttributeReference("createtab_stmt", StringType, nullable = false)() ) override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val tableMetadata = catalog.getTableMetadata(table) // TODO: unify this after we unify the CREATE TABLE syntax for hive serde and data source table. val stmt = if (DDLUtils.isDatasourceTable(tableMetadata)) { showCreateDataSourceTable(tableMetadata) } else { showCreateHiveTable(tableMetadata) } Seq(Row(stmt)) } private def showCreateHiveTable(metadata: CatalogTable): String = { def reportUnsupportedError(features: Seq[String]): Unit = { throw new AnalysisException( s"Failed to execute SHOW CREATE TABLE against table/view ${metadata.identifier}, " + "which is created by Hive and uses the following unsupported feature(s)\\n" + features.map(" - " + _).mkString("\\n") ) } if (metadata.unsupportedFeatures.nonEmpty) { reportUnsupportedError(metadata.unsupportedFeatures) } val builder = StringBuilder.newBuilder val tableTypeString = metadata.tableType match { case EXTERNAL => " EXTERNAL TABLE" case VIEW => " VIEW" case MANAGED => " TABLE" } builder ++= s"CREATE$tableTypeString ${table.quotedString}" if (metadata.tableType == VIEW) { if (metadata.schema.nonEmpty) { builder ++= metadata.schema.map(_.name).mkString("(", ", ", ")") } builder ++= metadata.viewText.mkString(" AS\\n", "", "\\n") } else { showHiveTableHeader(metadata, builder) showHiveTableNonDataColumns(metadata, builder) showHiveTableStorageInfo(metadata, builder) showHiveTableProperties(metadata, builder) } builder.toString() } private def showHiveTableHeader(metadata: CatalogTable, builder: StringBuilder): Unit = { val columns = metadata.schema.filterNot { column => metadata.partitionColumnNames.contains(column.name) }.map(columnToDDLFragment) if (columns.nonEmpty) { builder ++= columns.mkString("(", ", ", ")\\n") } metadata .comment .map("COMMENT '" + escapeSingleQuotedString(_) + "'\\n") .foreach(builder.append) } private def columnToDDLFragment(column: StructField): String = { val comment = column.getComment().map(escapeSingleQuotedString).map(" COMMENT '" + _ + "'") s"${quoteIdentifier(column.name)} ${column.dataType.catalogString}${comment.getOrElse("")}" } private def showHiveTableNonDataColumns(metadata: CatalogTable, builder: StringBuilder): Unit = { if (metadata.partitionColumnNames.nonEmpty) { val partCols = metadata.partitionSchema.map(columnToDDLFragment) builder ++= 
partCols.mkString("PARTITIONED BY (", ", ", ")\\n") } if (metadata.bucketSpec.isDefined) { throw new UnsupportedOperationException( "Creating Hive table with bucket spec is not supported yet.") } } private def showHiveTableStorageInfo(metadata: CatalogTable, builder: StringBuilder): Unit = { val storage = metadata.storage storage.serde.foreach { serde => builder ++= s"ROW FORMAT SERDE '$serde'\\n" val serdeProps = metadata.storage.properties.map { case (key, value) => s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'" } builder ++= serdeProps.mkString("WITH SERDEPROPERTIES (\\n ", ",\\n ", "\\n)\\n") } if (storage.inputFormat.isDefined || storage.outputFormat.isDefined) { builder ++= "STORED AS\\n" storage.inputFormat.foreach { format => builder ++= s" INPUTFORMAT '${escapeSingleQuotedString(format)}'\\n" } storage.outputFormat.foreach { format => builder ++= s" OUTPUTFORMAT '${escapeSingleQuotedString(format)}'\\n" } } if (metadata.tableType == EXTERNAL) { storage.locationUri.foreach { uri => builder ++= s"LOCATION '$uri'\\n" } } } private def showHiveTableProperties(metadata: CatalogTable, builder: StringBuilder): Unit = { if (metadata.properties.nonEmpty) { val props = metadata.properties.map { case (key, value) => s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'" } builder ++= props.mkString("TBLPROPERTIES (\\n ", ",\\n ", "\\n)\\n") } } private def showCreateDataSourceTable(metadata: CatalogTable): String = { val builder = StringBuilder.newBuilder builder ++= s"CREATE TABLE ${table.quotedString} " showDataSourceTableDataColumns(metadata, builder) showDataSourceTableOptions(metadata, builder) showDataSourceTableNonDataColumns(metadata, builder) builder.toString() } private def showDataSourceTableDataColumns( metadata: CatalogTable, builder: StringBuilder): Unit = { val columns = metadata.schema.fields.map(f => s"${quoteIdentifier(f.name)} ${f.dataType.sql}") builder ++= columns.mkString("(", ", ", ")\\n") } private def showDataSourceTableOptions(metadata: CatalogTable, builder: StringBuilder): Unit = { builder ++= s"USING ${metadata.provider.get}\\n" val dataSourceOptions = metadata.storage.properties.map { case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'" } ++ metadata.storage.locationUri.flatMap { location => if (metadata.tableType == MANAGED) { // If it's a managed table, omit PATH option. Spark SQL always creates external table // when the table creation DDL contains the PATH option. None } else { Some(s"path '${escapeSingleQuotedString(location)}'") } } if (dataSourceOptions.nonEmpty) { builder ++= "OPTIONS (\\n" builder ++= dataSourceOptions.mkString(" ", ",\\n ", "\\n") builder ++= ")\\n" } } private def showDataSourceTableNonDataColumns( metadata: CatalogTable, builder: StringBuilder): Unit = { val partCols = metadata.partitionColumnNames if (partCols.nonEmpty) { builder ++= s"PARTITIONED BY ${partCols.mkString("(", ", ", ")")}\\n" } metadata.bucketSpec.foreach { spec => if (spec.bucketColumnNames.nonEmpty) { builder ++= s"CLUSTERED BY ${spec.bucketColumnNames.mkString("(", ", ", ")")}\\n" if (spec.sortColumnNames.nonEmpty) { builder ++= s"SORTED BY ${spec.sortColumnNames.mkString("(", ", ", ")")}\\n" } builder ++= s"INTO ${spec.numBuckets} BUCKETS\\n" } } } private def escapeSingleQuotedString(str: String): String = { val builder = StringBuilder.newBuilder str.foreach { case '\\'' => builder ++= s"\\\\\\'" case ch => builder += ch } builder.toString() } }
kimoonkim/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
Scala
apache-2.0
35,261
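A hedged end-to-end sketch of driving several of the commands above through SparkSession.sql. The database and table names are made up, and a local, Hive-enabled session is assumed.

import org.apache.spark.sql.SparkSession

object TableCommandsSketch extends App {
  val spark = SparkSession.builder()
    .appName("table-commands-sketch")
    .master("local[*]")
    .enableHiveSupport()
    .getOrCreate()

  spark.sql("CREATE TABLE IF NOT EXISTS sales_copy LIKE sales")      // CreateTableLikeCommand
  spark.sql("ALTER TABLE sales_copy RENAME TO sales_backup")         // AlterTableRenameCommand
  spark.sql("SHOW TABLES IN default LIKE 'sales*'").show()           // ShowTablesCommand
  spark.sql("DESCRIBE FORMATTED sales_backup").show(truncate = false) // DescribeTableCommand
  spark.sql("SHOW TBLPROPERTIES sales_backup").show()                // ShowTablePropertiesCommand

  spark.stop()
}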
/* * Copyright 2011-2018 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.charts.stats.buffers import io.gatling.commons.stats.{ OK, Status } import io.gatling.core.stats.CountsVsTimePlot private[stats] class Counts(var oks: Int = 0, var kos: Int = 0) { def increment(status: Status): Unit = status match { case OK => oks += 1 case _ => kos += 1 } def total = oks + kos } private[stats] class CountsBuffer(buckets: Array[Int]) { val counts: Array[Counts] = Array.fill(buckets.length)(new Counts) def update(bucketNumber: Int, status: Status): Unit = { counts(bucketNumber).increment(status) } def distribution: Iterable[CountsVsTimePlot] = counts.view.zipWithIndex .map { case (count, bucketNumber) => CountsVsTimePlot(buckets(bucketNumber), count.oks, count.kos) } }
wiacekm/gatling
gatling-charts/src/main/scala/io/gatling/charts/stats/buffers/CountBuffer.scala
Scala
apache-2.0
1,375
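Counts and CountsBuffer above are private to Gatling's stats package, so they cannot be used from outside; the sketch below re-creates the same OK/KO bucket counting on its own to show what the per-bucket distribution ends up holding.

object CountsBufferSketch extends App {
  final class Counts(var oks: Int = 0, var kos: Int = 0) { def total: Int = oks + kos }

  val buckets = Array(0, 1000, 2000)              // bucket start times (millis)
  val counts  = Array.fill(buckets.length)(new Counts)

  def record(bucket: Int, ok: Boolean): Unit =
    if (ok) counts(bucket).oks += 1 else counts(bucket).kos += 1

  record(0, ok = true); record(0, ok = false); record(2, ok = true)

  counts.zipWithIndex.foreach { case (c, i) =>
    println(s"bucket=${buckets(i)}ms oks=${c.oks} kos=${c.kos} total=${c.total}")
  }
}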
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // scalastyle:off println package org.apache.spark.examples.streaming import java.nio.ByteBuffer import scala.util.Random import com.amazonaws.auth.DefaultAWSCredentialsProviderChain import com.amazonaws.services.kinesis.AmazonKinesisClient import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream import com.amazonaws.services.kinesis.model.PutRecordRequest import org.apache.log4j.{Level, Logger} import org.apache.spark.SparkConf import org.apache.spark.internal.Logging import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming.{Milliseconds, StreamingContext} import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions import org.apache.spark.streaming.kinesis.KinesisInputDStream /** * Consumes messages from a Amazon Kinesis streams and does wordcount. * * This example spins up 1 Kinesis Receiver per shard for the given stream. * It then starts pulling from the last checkpointed sequence number of the given stream. * * Usage: KinesisWordCountASL <app-name> <stream-name> <endpoint-url> <region-name> * <app-name> is the name of the consumer app, used to track the read data in DynamoDB * <stream-name> name of the Kinesis stream (ie. mySparkStream) * <endpoint-url> endpoint of the Kinesis service * (e.g. https://kinesis.us-east-1.amazonaws.com) * * * Example: * # export AWS keys if necessary * $ export AWS_ACCESS_KEY_ID=<your-access-key> * $ export AWS_SECRET_KEY=<your-secret-key> * * # run the example * $ SPARK_HOME/bin/run-example streaming.KinesisWordCountASL myAppName mySparkStream \\ * https://kinesis.us-east-1.amazonaws.com * * There is a companion helper class called KinesisWordProducerASL which puts dummy data * onto the Kinesis stream. * * This code uses the DefaultAWSCredentialsProviderChain to find credentials * in the following order: * Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY * Java System Properties - aws.accessKeyId and aws.secretKey * Credential profiles file - default location (~/.aws/credentials) shared by all AWS SDKs * Instance profile credentials - delivered through the Amazon EC2 metadata service * For more information, see * http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/credentials.html * * See http://spark.apache.org/docs/latest/streaming-kinesis-integration.html for more details on * the Kinesis Spark Streaming integration. */ object KinesisWordCountASL extends Logging { def main(args: Array[String]) { // Check that all required args were passed in. 
if (args.length != 3) { System.err.println( """ |Usage: KinesisWordCountASL <app-name> <stream-name> <endpoint-url> <region-name> | | <app-name> is the name of the consumer app, used to track the read data in DynamoDB | <stream-name> is the name of the Kinesis stream | <endpoint-url> is the endpoint of the Kinesis service | (e.g. https://kinesis.us-east-1.amazonaws.com) | |Generate input data for Kinesis stream using the example KinesisWordProducerASL. |See http://spark.apache.org/docs/latest/streaming-kinesis-integration.html for more |details. """.stripMargin) System.exit(1) } StreamingExamples.setStreamingLogLevels() // Populate the appropriate variables from the given args val Array(appName, streamName, endpointUrl) = args // Determine the number of shards from the stream using the low-level Kinesis Client // from the AWS Java SDK. val credentials = new DefaultAWSCredentialsProviderChain().getCredentials() require(credentials != null, "No AWS credentials found. Please specify credentials using one of the methods specified " + "in http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/credentials.html") val kinesisClient = new AmazonKinesisClient(credentials) kinesisClient.setEndpoint(endpointUrl) val numShards = kinesisClient.describeStream(streamName).getStreamDescription().getShards().size // In this example, we're going to create 1 Kinesis Receiver/input DStream for each shard. // This is not a necessity; if there are less receivers/DStreams than the number of shards, // then the shards will be automatically distributed among the receivers and each receiver // will receive data from multiple shards. val numStreams = numShards // Spark Streaming batch interval val batchInterval = Milliseconds(2000) // Kinesis checkpoint interval is the interval at which the DynamoDB is updated with information // on sequence number of records that have been received. Same as batchInterval for this // example. val kinesisCheckpointInterval = batchInterval // Get the region name from the endpoint URL to save Kinesis Client Library metadata in // DynamoDB of the same region as the Kinesis stream val regionName = KinesisExampleUtils.getRegionNameByEndpoint(endpointUrl) // Setup the SparkConfig and StreamingContext val sparkConfig = new SparkConf().setAppName("KinesisWordCountASL") val ssc = new StreamingContext(sparkConfig, batchInterval) // Create the Kinesis DStreams val kinesisStreams = (0 until numStreams).map { i => KinesisInputDStream.builder .streamingContext(ssc) .streamName(streamName) .endpointUrl(endpointUrl) .regionName(regionName) .initialPositionInStream(InitialPositionInStream.LATEST) .checkpointAppName(appName) .checkpointInterval(kinesisCheckpointInterval) .storageLevel(StorageLevel.MEMORY_AND_DISK_2) .build() } // Union all the streams val unionStreams = ssc.union(kinesisStreams) // Convert each line of Array[Byte] to String, and split into words val words = unionStreams.flatMap(byteArray => new String(byteArray).split(" ")) // Map each word to a (word, 1) tuple so we can reduce by key to count the words val wordCounts = words.map(word => (word, 1)).reduceByKey(_ + _) // Print the first 10 wordCounts wordCounts.print() // Start the streaming context and await termination ssc.start() ssc.awaitTermination() } } /** * Usage: KinesisWordProducerASL <stream-name> <endpoint-url> \\ * <records-per-sec> <words-per-record> * * <stream-name> is the name of the Kinesis stream (ie. mySparkStream) * <endpoint-url> is the endpoint of the Kinesis service * (ie. 
https://kinesis.us-east-1.amazonaws.com) * <records-per-sec> is the rate of records per second to put onto the stream * <words-per-record> is the number of words per record * * Example: * $ SPARK_HOME/bin/run-example streaming.KinesisWordProducerASL mySparkStream \\ * https://kinesis.us-east-1.amazonaws.com 10 5 */ object KinesisWordProducerASL { def main(args: Array[String]) { if (args.length != 4) { System.err.println( """ |Usage: KinesisWordProducerASL <stream-name> <endpoint-url> <records-per-sec> <words-per-record> | | <stream-name> is the name of the Kinesis stream | <endpoint-url> is the endpoint of the Kinesis service | (e.g. https://kinesis.us-east-1.amazonaws.com) | <records-per-sec> is the rate of records per second to put onto the stream | <words-per-record> is the number of words per record | """.stripMargin) System.exit(1) } // Set default log4j logging level to WARN to hide Spark logs StreamingExamples.setStreamingLogLevels() // Populate the appropriate variables from the given args val Array(stream, endpoint, recordsPerSecond, wordsPerRecord) = args // Generate the records and return the totals val totals = generate(stream, endpoint, recordsPerSecond.toInt, wordsPerRecord.toInt) // Print the array of (word, total) tuples println("Totals for the words sent") totals.foreach(println(_)) } def generate(stream: String, endpoint: String, recordsPerSecond: Int, wordsPerRecord: Int): Seq[(String, Int)] = { val randomWords = List("spark", "you", "are", "my", "father") val totals = scala.collection.mutable.Map[String, Int]() // Create the low-level Kinesis Client from the AWS Java SDK. val kinesisClient = new AmazonKinesisClient(new DefaultAWSCredentialsProviderChain()) kinesisClient.setEndpoint(endpoint) println(s"Putting records onto stream $stream and endpoint $endpoint at a rate of" + s" $recordsPerSecond records per second and $wordsPerRecord words per record") // Iterate and put records onto the stream per the given recordsPerSecond and wordsPerRecord for (i <- 1 to 10) { // Generate recordsPerSecond records to put onto the stream val records = (1 to recordsPerSecond.toInt).foreach { recordNum => // Randomly generate wordsPerRecord number of words val data = (1 to wordsPerRecord.toInt).map(x => { // Get a random index to a word val randomWordIdx = Random.nextInt(randomWords.size) val randomWord = randomWords(randomWordIdx) // Increment total count to compare to server counts later totals(randomWord) = totals.getOrElse(randomWord, 0) + 1 randomWord }).mkString(" ") // Create a partitionKey based on recordNum val partitionKey = s"partitionKey-$recordNum" // Create a PutRecordRequest with an Array[Byte] version of the data val putRecordRequest = new PutRecordRequest().withStreamName(stream) .withPartitionKey(partitionKey) .withData(ByteBuffer.wrap(data.getBytes())) // Put the record onto the stream and capture the PutRecordResult val putRecordResult = kinesisClient.putRecord(putRecordRequest) } // Sleep for a second Thread.sleep(1000) println("Sent " + recordsPerSecond + " records") } // Convert the totals to (word, total) tuples totals.toSeq.sortBy(_._1) } } /** * Utility functions for Spark Streaming examples. * This has been lifted from the examples/ project to remove the circular dependency. */ private[streaming] object StreamingExamples extends Logging { // Set reasonable logging levels for streaming if the user has not configured log4j.
def setStreamingLogLevels() { val log4jInitialized = Logger.getRootLogger.getAllAppenders.hasMoreElements if (!log4jInitialized) { // We first log something to initialize Spark's default logging, then we override the // logging level. logInfo("Setting log level to [WARN] for streaming example." + " To override add a custom log4j.properties to the classpath.") Logger.getRootLogger.setLevel(Level.WARN) } } } // scalastyle:on println
akopich/spark
external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
Scala
apache-2.0
11,893
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gearpump.streaming.examples.kafka.wordcount import java.time.Instant import scala.collection.mutable import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalacheck.Gen import org.scalatest.{FlatSpec, Matchers} import org.apache.gearpump.Message import org.apache.gearpump.cluster.UserConfig import org.apache.gearpump.streaming.MockUtil class SumSpec extends FlatSpec with Matchers { it should "sum should calculate the frequency of the word correctly" in { val stringGenerator = Gen.alphaStr val expectedWordCountMap: mutable.HashMap[String, Long] = new mutable.HashMap[String, Long]() val taskContext = MockUtil.mockTaskContext val sum = new Sum(taskContext, UserConfig.empty) sum.onStart(Instant.EPOCH) val str = "once two two three three three" var totalWordCount = 0 stringGenerator.map { word => totalWordCount += 1 expectedWordCountMap.put(word, expectedWordCountMap.getOrElse(word, 0L) + 1) sum.onNext(Message(word)) } verify(taskContext, times(totalWordCount)).output(anyObject[Message]) expectedWordCountMap.foreach { wordCount => val (word, count) = wordCount assert(count == sum.wordcount.get(word).get) } } }
manuzhang/incubator-gearpump
examples/streaming/kafka/src/test/scala/org/apache/gearpump/streaming/examples/kafka/wordcount/SumSpec.scala
Scala
apache-2.0
2,059
/* * Copyright 2014–2018 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.std import quasar.Func import slamdata.Predef._ import quasar.TypeGenerators import org.scalacheck.{Arbitrary, Gen, Prop}, Arbitrary.arbitrary import scalaz.Validation, Validation.FlatMap._ class RelationsSpec extends quasar.Qspec with TypeGenerators { import RelationsLib._ import quasar.Type import quasar.Type.Const import quasar.Data.Bool import quasar.Data.Dec import quasar.Data.Int import quasar.Data.Str val comparisonOps = Gen.oneOf(Eq, Neq, Lt, Lte, Gt, Gte) "RelationsLib" should { "type eq with matching arguments" >> prop { (t : Type) => val expr = Eq.tpe(Func.Input2(t, t)) t match { case Const(_) => expr should beSuccessful(Const(Bool(true))) case _ => expr should beSuccessful(Type.Bool) } } "fold integer eq" in { val expr = Eq.tpe(Func.Input2(Const(Int(1)), Const(Int(1)))) expr should beSuccessful(Const(Bool(true))) } "fold eq with mixed numeric type" in { val expr = Eq.tpe(Func.Input2(Const(Int(1)), Const(Dec(1.0)))) expr should beSuccessful(Const(Bool(true))) } "fold eq with mixed type" in { val expr = Eq.tpe(Func.Input2(Const(Int(1)), Const(Str("a")))) expr should beSuccessful(Const(Bool(false))) } "type Eq with Top" >> prop { (t : Type) => Eq.tpe(Func.Input2(Type.Top, t)) should beSuccessful(Type.Bool) Eq.tpe(Func.Input2(t, Type.Top)) should beSuccessful(Type.Bool) } "type Neq with Top" >> prop { (t : Type) => Neq.tpe(Func.Input2(Type.Top, t)) should beSuccessful(Type.Bool) Neq.tpe(Func.Input2(t, Type.Top)) should beSuccessful(Type.Bool) } "fold neq with mixed type" in { val expr = Neq.tpe(Func.Input2(Const(Int(1)), Const(Str("a")))) expr should beSuccessful(Const(Bool(true))) } // TODO: similar for the rest of the simple relations "fold cond with true" >> prop { (t1 : Type, t2 : Type) => val expr = Cond.tpe(Func.Input3(Const(Bool(true)), t1, t2)) expr must beSuccessful(t1) } "fold cond with false" >> prop { (t1 : Type, t2 : Type) => val expr = Cond.tpe(Func.Input3(Const(Bool(false)), t1, t2)) expr must beSuccessful(t2) } "find lub for cond with int" in { val expr = Cond.tpe(Func.Input3(Type.Bool, Type.Int, Type.Int)) expr must beSuccessful(Type.Int) } "find lub for cond with arbitrary args" >> prop { (t1 : Type, t2 : Type) => val expr = Cond.tpe(Func.Input3(Type.Bool, t1, t2)) expr must beSuccessful(Type.lub(t1, t2)) } "flip comparison ops" >> Prop.forAll(comparisonOps, arbitrary[BigInt], arbitrary[BigInt]) { case (func, left, right) => flip(func).map( _.tpe(Func.Input2(Type.Const(Int(right)), Type.Const(Int(left))))) must beSome(func.tpe(Func.Input2(Type.Const(Int(left)), Type.Const(Int(right))))) } "flip boolean ops" >> Prop.forAll(Gen.oneOf(And, Or), arbitrary[Boolean], arbitrary[Boolean]) { case (func, left, right) => flip(func).map( _.tpe(Func.Input2(Type.Const(Bool(right)), Type.Const(Bool(left))))) must beSome(func.tpe(Func.Input2(Type.Const(Bool(left)), Type.Const(Bool(right))))) } "negate comparison ops" >> Prop.forAll(comparisonOps, arbitrary[BigInt], arbitrary[BigInt]) { case 
(func, left, right) => RelationsLib.negate(func).map( _.tpe(Func.Input2(Type.Const(Int(left)), Type.Const(Int(right))))) must beSome(func.tpe(Func.Input2(Type.Const(Int(left)), Type.Const(Int(right)))).flatMap(x => Not.tpe(Func.Input1(x)))) } } }
jedesah/Quasar
frontend/src/test/scala/quasar/std/relations.scala
Scala
apache-2.0
4,255
object i0{ import i0.i0 // error def i0={ import _ // error import } // error }
lampepfl/dotty
tests/neg/i6056.scala
Scala
apache-2.0
122
package org.bitcoins.protocol.script import org.bitcoins.crypto.ECFactory import org.bitcoins.script.bitwise.OP_EQUALVERIFY import org.bitcoins.script.constant.{BytesToPushOntoStack, ScriptConstant, ScriptToken} import org.bitcoins.script.crypto.{OP_CHECKSIG, OP_HASH160, OP_CODESEPARATOR} import org.bitcoins.script.stack.OP_DUP import org.bitcoins.util.{TestUtil} import org.scalatest.{MustMatchers, FlatSpec} /** * Created by chris on 1/14/16. */ class ScriptPubKeyTest extends FlatSpec with MustMatchers { val expectedAsm : Seq[ScriptToken] = List(OP_DUP, OP_HASH160, BytesToPushOntoStack(20), ScriptConstant("31a420903c05a0a7de2de40c9f02ebedbacdc172"), OP_EQUALVERIFY, OP_CHECKSIG) //from b30d3148927f620f5b1228ba941c211fdabdae75d0ba0b688a58accbf018f3cc val rawScriptPubKey = TestUtil.rawP2PKHScriptPubKey val scriptPubKey = ScriptPubKey(rawScriptPubKey) "ScriptPubKey" must "give the expected asm from creating a scriptPubKey from hex" in { scriptPubKey.asm must be (expectedAsm) } }
Christewart/scalacoin
src/test/scala/org/bitcoins/protocol/script/ScriptPubKeyTest.scala
Scala
mit
1,018
/* * Copyright 2016 Branislav Lazic * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.akkabot import akka.actor.ActorSystem import com.sksamuel.elastic4s.{ ElasticClient, ElasticsearchClientUri } import Settings.elasticsearch._ object QueryApp extends App { val client = ElasticClient.transport(ElasticsearchClientUri(host, tcpPort)) val system = ActorSystem("akkabot-query-system") system.actorOf(Root.props(client), Root.Name) }
BranislavLazic/akkabot
akkabot-query/src/main/scala/io/akkabot/QueryApp.scala
Scala
apache-2.0
964
package com.twitter.algebird.util.summer /** * @author Mansur Ashraf. */ trait Incrementor { def incr(): Unit def incrBy(amount: Long): Unit }
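// The trait above fixes only the counter interface; the backing store is left to implementors.
// Below is a minimal illustrative sketch (not part of algebird-util) of a thread-safe
// implementation on top of java.util.concurrent.atomic.AtomicLong. The class name and the extra
// `value` accessor are assumptions made for the example; it assumes the Incrementor trait above
// is in scope.
import java.util.concurrent.atomic.AtomicLong

class AtomicLongIncrementor extends Incrementor {
  private val counter = new AtomicLong(0L)

  // Bump the counter by one.
  override def incr(): Unit = counter.incrementAndGet()

  // Bump the counter by an arbitrary amount.
  override def incrBy(amount: Long): Unit = counter.addAndGet(amount)

  // Read back the current total (not required by the trait, added only for the sketch).
  def value: Long = counter.get()
}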
nevillelyh/algebird
algebird-util/src/main/scala/com/twitter/algebird/util/summer/Incrementor.scala
Scala
apache-2.0
150
package controllers import scala.concurrent._ import play.api.libs.iteratee._ import play.api.mvc._ import play.api.http.{Status, HeaderNames} import reactivemongo.api.gridfs.ReadFileEntry trait IStreamController extends HeaderNames with Status { def serve(optEntry: Future[Option[ReadFileEntry]], errorMsg: String)(implicit ec: ExecutionContext): Future[Result] = { optEntry.filter(_.isDefined).map(_.get).map { file => SimpleResult( header = ResponseHeader(OK, Map( CONTENT_LENGTH -> ("" + file.length), CONTENT_TYPE -> file.contentType.getOrElse("application/octet-stream") )), body = file.enumerate ) }.recover { case _ => val bytes = (errorMsg).toCharArray.map(_.toByte) SimpleResult( header = ResponseHeader(NOT_FOUND, Map(CONTENT_LENGTH -> bytes.length.toString)), body = Enumerator.apply(bytes) ) } } }
nmccready/snippets
play/controllers/IStreamController.scala
Scala
mit
957
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.rules.logical import org.apache.flink.table.api.{TableException, ValidationException} import org.apache.flink.table.expressions.FieldReferenceExpression import org.apache.flink.table.planner.calcite.FlinkTypeFactory import org.apache.flink.table.planner.calcite.FlinkTypeFactory.toLogicalType import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromLogicalTypeToDataType import org.apache.calcite.rel.`type`.RelDataType import org.apache.calcite.rel.logical.{LogicalAggregate, LogicalProject} import org.apache.calcite.rex._ import org.apache.calcite.sql.`type`.{SqlTypeFamily, SqlTypeName} import _root_.java.math.{BigDecimal => JBigDecimal} /** * Planner rule that transforms simple [[LogicalAggregate]] on a [[LogicalProject]] * with windowing expression to * [[org.apache.flink.table.planner.plan.nodes.calcite.LogicalWindowAggregate]] * for stream. */ class StreamLogicalWindowAggregateRule extends LogicalWindowAggregateRuleBase("StreamLogicalWindowAggregateRule") { /** Returns a reference to the time attribute with a time indicator type */ override private[table] def getInAggregateGroupExpression( rexBuilder: RexBuilder, windowExpression: RexCall): RexNode = { val timeAttribute = windowExpression.operands.get(0) if (!FlinkTypeFactory.isTimeIndicatorType(timeAttribute.getType)) { throw new TableException(s"Window aggregate can only be defined over a " + s"time attribute column, but ${timeAttribute.getType} encountered.") } timeAttribute } /** Returns a zero literal of a timestamp type */ override private[table] def getOutAggregateGroupExpression( rexBuilder: RexBuilder, windowExpression: RexCall): RexNode = { // Create a literal with normal SqlTypeName.TIMESTAMP // in case we reference a rowtime field. 
rexBuilder.makeLiteral( 0L, rexBuilder.getTypeFactory.createSqlType( SqlTypeName.TIMESTAMP, windowExpression.getType.getPrecision), true) } private[table] override def getTimeFieldReference( operand: RexNode, timeAttributeIndex: Int, rowType: RelDataType): FieldReferenceExpression = { if (!FlinkTypeFactory.isTimeIndicatorType(operand.getType)) { throw new ValidationException("Window can only be defined over a time attribute column.") } val fieldName = rowType.getFieldList.get(timeAttributeIndex).getName val fieldType = rowType.getFieldList.get(timeAttributeIndex).getType new FieldReferenceExpression( fieldName, fromLogicalTypeToDataType(toLogicalType(fieldType)), 0, timeAttributeIndex) } def getOperandAsLong(call: RexCall, idx: Int): Long = call.getOperands.get(idx) match { case v: RexLiteral if v.getTypeName.getFamily == SqlTypeFamily.INTERVAL_DAY_TIME => v.getValue.asInstanceOf[JBigDecimal].longValue() case _: RexLiteral => throw new TableException( "Window aggregate only support SECOND, MINUTE, HOUR, DAY as the time unit. " + "MONTH and YEAR time unit are not supported yet.") case _ => throw new TableException("Only constant window descriptors are supported.") } } object StreamLogicalWindowAggregateRule { val INSTANCE = new StreamLogicalWindowAggregateRule }
hequn8128/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/StreamLogicalWindowAggregateRule.scala
Scala
apache-2.0
4,131
/* * Copyright (C) Lightbend Inc. <https://www.lightbend.com> */ package scalaguide.forms.csrf import javax.inject.Inject import org.specs2.mutable.Specification import play.api.mvc.BaseController import play.api.mvc.ControllerComponents class UserController @Inject() (val controllerComponents: ControllerComponents) extends BaseController { def userGet = Action { Ok("success").as("text/html") } } // #testing-csrf import play.api.test.Helpers._ import play.api.test.CSRFTokenHelper._ import play.api.test.FakeRequest import play.api.test.WithApplication class UserControllerSpec extends Specification { "UserController GET" should { "render the index page from the application" in new WithApplication() { val controller = app.injector.instanceOf[UserController] val request = FakeRequest().withCSRFToken val result = controller.userGet().apply(request) status(result) must beEqualTo(OK) contentType(result) must beSome("text/html") } } } // #testing-csrf
benmccann/playframework
documentation/manual/working/scalaGuide/main/forms/code/scalaguide/forms/csrf/UserControllerSpec.scala
Scala
apache-2.0
1,024
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.cypher.docgen.refcard import org.neo4j.cypher.QueryStatisticsTestSupport import org.neo4j.cypher.docgen.RefcardTest import org.neo4j.cypher.internal.compiler.v2_3.executionplan.InternalExecutionResult class MergeTest extends RefcardTest with QueryStatisticsTestSupport { val graphDescription = List("A:Person KNOWS B:Person") val title = "MERGE" val css = "write c4-3 c5-4 c6-1" override val linkId = "query-merge" override def assert(name: String, result: InternalExecutionResult) { name match { case "merge" => assertStats(result, nodesCreated = 1, propertiesSet = 2, labelsAdded = 1) assert(result.toList.size === 1) case "merge-rel" => assertStats(result, relationshipsCreated = 1) assert(result.toList.size === 1) case "merge-sub" => assertStats(result, relationshipsCreated = 1, nodesCreated = 1, propertiesSet = 1, labelsAdded = 1) assert(result.toList.size === 1) } } override def parameters(name: String): Map[String, Any] = name match { case "parameters=aname" => Map("value" -> "Charlie") case "parameters=names" => Map("value1" -> "Alice", "value2" -> "Bob", "value3" -> "Charlie") case "" => Map() } override val properties: Map[String, Map[String, Any]] = Map( "A" -> Map("name" -> "Alice"), "B" -> Map("name" -> "Bob")) def text = """ ###assertion=merge parameters=aname // MERGE (n:Person {name: {value}}) ON CREATE SET n.created = timestamp() ON MATCH SET n.counter = coalesce(n.counter, 0) + 1, n.accessTime = timestamp() RETURN n### Match pattern or create it if it does not exist. Use +ON CREATE+ and +ON MATCH+ for conditional updates. ###assertion=merge-rel parameters=names // MATCH (a:Person {name: {value1}}), (b:Person {name: {value2}}) MERGE (a)-[r:LOVES]->(b) RETURN r### +MERGE+ finds or creates a relationship between the nodes. ###assertion=merge-sub parameters=names // MATCH (a:Person {name: {value1}}) MERGE (a)-[r:KNOWS]->(b:Person {name: {value3}}) RETURN r, b### +MERGE+ finds or creates subgraphs attached to the node. """ }
HuangLS/neo4j
manual/cypher/refcard-tests/src/test/scala/org/neo4j/cypher/docgen/refcard/MergeTest.scala
Scala
apache-2.0
2,960
/** * API for getting built-in resources */ package helper /** * @author ShiZhan * API for getting built-in resources */ object Resource { def getInputStream(name: String) = getClass.getClassLoader.getResourceAsStream(name) def getString(name: String) = io.Source.fromInputStream(getInputStream(name)).mkString }
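// A brief usage sketch of the helper above. It assumes the helper.Resource object is on the
// classpath together with a bundled resource; the resource name "greeting.txt" is a placeholder
// chosen for the example, not a file shipped with the project.
object ResourceUsageExample extends App {
  // Read the whole resource into a String in one call.
  val text = helper.Resource.getString("greeting.txt")
  println(text)

  // Or work with the raw InputStream directly; the caller is responsible for closing it.
  val in = helper.Resource.getInputStream("greeting.txt")
  try println(s"first byte: ${in.read()}") finally in.close()
}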
ShiZhan/wnotify
src/main/scala/helper/Resource.scala
Scala
apache-2.0
331
package memnets.core import memnets.model._ import memnets.ui.SkinType import scala.beans._ class EngineEvent trait EngineListener { def process(ee: EngineEvent): Unit } case class ErrorEvent(@BeanProperty msg: String = "error", @BeanProperty ex: Throwable) extends EngineEvent case class FlashTitleEvent(@BeanProperty repeat: Int = 1) extends EngineEvent case class GameEndEvent(@BeanProperty t: Int, @BooleanBeanProperty win: Boolean) extends EngineEvent object GameIntroOverEvent { def apply() = new GameIntroOverEvent() } class GameIntroOverEvent extends EngineEvent case class GoalsEvent(@BeanProperty goals: Goals, @BooleanBeanProperty start: Boolean = true) extends EngineEvent case class GoalEvent(@BeanProperty goal: Goal, @BooleanBeanProperty start: Boolean = true) extends EngineEvent /** used during task when UI has progress dialog */ case class ProgressEvent( @BeanProperty msg: String, @BeanProperty workDone: Double = -1.0, @BeanProperty max: Double = 1.0) extends EngineEvent object MessageEvent { def apply(desc: Descriptable) = new MessageEvent(desc.description) } case class MessageEvent(@BeanProperty msg: String) extends EngineEvent case class RealSampleEvent(@BeanProperty rt: RealTime, @BeanProperty act: Double) extends EngineEvent case class RealStartEvent( @BeanProperty y: Yb, @BeanProperty elem: Element, @BeanProperty src: UserSource, @BeanProperty init: Option[Float] = None, @BooleanBeanProperty touch: Boolean = false) extends EngineEvent case class RealEndEvent(@BeanProperty rt: RealTime, @BooleanBeanProperty touch: Boolean = false) extends EngineEvent case class ResetEvent(@BeanProperty trial: Trial) extends EngineEvent class EditorEvent extends EngineEvent class LibraryEvent extends EngineEvent /** NOTE : y can be null */ case class SelectEvent(@BeanProperty y: Y) extends EngineEvent case class SpeedEvent(@BooleanBeanProperty inc: Boolean) extends EngineEvent case class TogglePlayEvent(@BeanProperty msg: String = "") extends EngineEvent case class TrialDoneEvent(@BeanProperty trial: Trial) extends EngineEvent case class TrialChangeEvent(@BooleanBeanProperty next: Boolean) extends EngineEvent case class TrialEvent(@BeanProperty trial: Trial) extends EngineEvent case class BuilderEvent(@BeanProperty builder: ModelBuilder) extends EngineEvent case class SignalEvent(@BeanProperty signal: Signal, @BooleanBeanProperty on: Boolean) extends EngineEvent /** * needed for corner case where want notify AFTER skin.init(model) call by sysUI in rebuild * the primary driver for this event is SkinEditor. * the rest of system listens to Engine.skinsModel */ case class SkinBuiltEvent(@BeanProperty skin: SkinType) extends EngineEvent case class BuiltModelEvent(@BeanProperty model: BuiltModel) extends EngineEvent class CaptureEvent extends EngineEvent
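// A minimal sketch of the consuming side of these events: an EngineListener implementation that
// pattern-matches on a few of the case classes declared above and ignores the rest. The class
// name and the println handling are illustrative assumptions, not code from the memnets project;
// the sketch assumes the memnets.core definitions above are in scope.
class ConsoleEngineListener extends EngineListener {
  override def process(ee: EngineEvent): Unit = ee match {
    case MessageEvent(msg)    => println(s"message: $msg")
    case SpeedEvent(inc)      => println(if (inc) "speed increased" else "speed decreased")
    case GameEndEvent(t, win) => println(s"game ended at t = $t, win = $win")
    case _                    => () // other events are ignored by this listener
  }
}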
MemoryNetworks/memnets
api/src/main/scala/memnets/core/EngineEvents.scala
Scala
apache-2.0
2,848
/* * Copyright 2016 Miroslav Janíček * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sandius.rembulan.util import java.nio.charset.Charset import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{FunSpec, MustMatchers} import scala.collection.JavaConversions._ import scala.collection.mutable.ArrayBuffer @RunWith(classOf[JUnitRunner]) class CharsetEncoderByteIteratorSpec extends FunSpec with MustMatchers { describe ("constructor") { it ("throws a NullPointerException when instantiated with a null string") { intercept[NullPointerException] { new CharsetEncoderByteIterator(null, Charset.defaultCharset()) } } it ("throws a NullPointerException when instantiated with a null charset") { intercept[NullPointerException] { new CharsetEncoderByteIterator("", null) } } it ("throws an IllegalArgumentException when instantiated with a zero step") { intercept[IllegalArgumentException] { new CharsetEncoderByteIterator("", Charset.defaultCharset(), 0) } } it ("throws an IllegalArgumentException when instantiated with a negative step") { intercept[IllegalArgumentException] { new CharsetEncoderByteIterator("", Charset.defaultCharset(), -1) } } it ("throws an IllegalArgumentException when instantiated with step equal to 1") { intercept[IllegalArgumentException] { new CharsetEncoderByteIterator("", Charset.defaultCharset(), 1) } } } describe ("in charset") { // from http://www.unicode.org/udhr/ val testStrings = Seq( "Arabic, Standard" -> "يولد جميع الناس أحرارًا متساوين في الكرامة والحقوق. وقد وهبوا عقلاً وضميرًا وعليهم أن يعامل بعضهم بعضًا بروح الإخاء.", // "Armenian" -> "Բոլոր մարդիկ ծնվում են ազատ ու հավասար իրենց արժանապատվությամբ ու իրավունքներով։ Նրանք ունեն բանականություն ու խիղճ և միմյանց պետք է եղբայրաբար վերաբերվեն։", // "Assyrian Neo-Aramaic" -> "ܟܠ ܒܪܢܫܐ ܒܪܝܠܗ ܚܐܪܐ ܘܒܪܒܪ ܓܘ ܐܝܩܪܐ ܘܙܕܩܐ. ܘܦܝܫܝܠܗ ܝܗܒܐ ܗܘܢܐ ܘܐܢܝܬ. ܒܘܕ ܕܐܗܐ ܓܫܩܬܝ ܥܠ ܐܚܪܢܐ ܓܪܓ ܗܘܝܐ ܒܚܕ ܪܘܚܐ ܕܐܚܢܘܬܐ.", // "Bengali" -> "সমস্ত মানুষ স্বাধীনভাবে সমান মর্যাদা এবং অধিকার নিয়ে জন্মগ্রহণ করে। তাঁদের বিবেক এবং বুদ্ধি আছে; সুতরাং সকলেরই একে অপরের প্রতি ভ্রাতৃত্বসুলভ মনোভাব নিয়ে আচরণ করা উচিত।", // "Burmese" -> "လူတိုင်းသည် တူညီ လွတ်လပ်သော ဂုဏ်သိက္ခါဖြင့် လည်းကောင်း၊ တူညီလွတ်လပ်သော အခွင့်အရေးများဖြင့် လည်းကောင်း၊ မွေးဖွားလာသူများ ဖြစ်သည်။ ထိုသူတို့၌ ပိုင်းခြား ဝေဖန်တတ်သော ဉာဏ်နှင့် ကျင့်ဝတ် သိတတ်သော စိတ်တို့ရှိကြ၍ ထိုသူတို့သည် အချင်းချင်း မေတ္တာထား၍ ဆက်ဆံကျင့်သုံးသင့်၏။", // "Cherokee (cased)" -> "Ꮒꭶꮣ ꭰꮒᏼꮻ ꭴꮎꮥꮕꭲ ꭴꮎꮪꮣꮄꮣ ꭰꮄ ꭱꮷꮃꭽꮙ ꮎꭲ ꭰꮲꮙꮩꮧ ꭰꮄ ꭴꮒꮂ ꭲᏻꮎꮫꮧꭲ. Ꮎꮝꭹꮎꮓ ꭴꮅꮝꭺꮈꮤꮕꭹ ꭴꮰꮿꮝꮧ ꮕᏸꮅꮫꭹ ꭰꮄ ꭰꮣꮕꮦꮯꮣꮝꮧ ꭰꮄ ꭱꮅꮝꮧ ꮟᏼꮻꭽ ꮒꮪꮎꮣꮫꮎꮥꭼꭹ ꮎ ꮧꮎꮣꮕꮯ ꭰꮣꮕꮩ ꭼꮧ.", // "Cherokee (uppercase)" -> "ᏂᎦᏓ ᎠᏂᏴᏫ ᎤᎾᏕᏅᎢ ᎤᎾᏚᏓᎴᏓ ᎠᎴ ᎡᏧᎳᎭᏉ ᎾᎢ ᎠᏢᏉᏙᏗ ᎠᎴ ᎤᏂᎲ ᎢᏳᎾᏛᏗᎢ. 
ᎾᏍᎩᎾᏃ ᎤᎵᏍᎪᎸᏔᏅᎩ ᎤᏠᏯᏍᏗ ᏅᏰᎵᏛᎩ ᎠᎴ ᎠᏓᏅᏖᏟᏓᏍᏗ ᎠᎴ ᎡᎵᏍᏗ ᏏᏴᏫᎭ ᏂᏚᎾᏓᏛᎾᏕᎬᎩ Ꮎ ᏗᎾᏓᏅᏟ ᎠᏓᏅᏙ ᎬᏗ.", "Chinese, Mandarin (Simplified)" -> "人人生而自由,在尊严和权利上一律平等。他们赋有理性和良心,并应以兄弟关系的精神相对待。", // "Cree, Swampy" -> "ᒥᓯᐌ ᐃᓂᓂᐤ ᑎᐯᓂᒥᑎᓱᐎᓂᐠ ᐁᔑ ᓂᑕᐎᑭᐟ ᓀᐢᑕ ᐯᔭᑾᐣ ᑭᒋ ᐃᔑ ᑲᓇᐗᐸᒥᑯᐎᓯᐟ ᑭᐢᑌᓂᒥᑎᓱᐎᓂᐠ ᓀᐢᑕ ᒥᓂᑯᐎᓯᐎᓇ᙮ ᐁ ᐸᑭᑎᓇᒪᒋᐠ ᑲᑫᑕᐌᓂᑕᒧᐎᓂᓂᐤ ᓀᐢᑕ ᒥᑐᓀᓂᒋᑲᓂᓂᐤ ᓀᐢᑕ ᐎᒋᑴᓯᑐᐎᓂᐠ ᑭᒋ ᐃᔑ ᑲᓇᐗᐸᒥᑐᒋᐠ᙮", "English" -> "All human beings are born free and equal in dignity and rights. They are endowed with reason and conscience and should act towards one another in a spirit of brotherhood.", // "Georgian" -> "ყველა ადამიანი იბადება თავისუფალი და თანასწორი თავისი ღირსებითა და უფლებებით. მათ მინიჭებული აქვთ გონება და სინდისი და ერთმანეთის მიმართ უნდა იქცეოდნენ ძმობის სულისკვეთებით.", "Greek (polytonic)" -> "Ὅλοι οἱ ἄνθρωποι γεννιοῦνται ἐλεύθεροι καὶ ἴσοι στὴν ἀξιοπρέπεια καὶ τὰ δικαιώματα. Εἶναι προικισμένοι μὲ λογικὴ καὶ συνείδηση, καὶ ὀφείλουν νὰ συμπεριφέρονται μεταξύ τους μὲ πνεῦμα ἀδελφοσύνης.", // "Gujarati" -> "પ્રતિષ્ઠા અને અધિકારોની દૃષ્ટિએ સર્વ માનવો જન્મથી સ્વતંત્ર અને સમાન હોય છે. તેમનામાં વિચારશક્તિ અને અંતઃકરણ હોય છે અને તેમણે પરસ્પર બંધુત્વની ભાવનાથી વર્તવું જોઇએ.", "Hebrew" -> "כל בני אדם נולדו בני חורין ושווים בערכם ובזכויותיהם. כולם חוננו בתבונה ובמצפון, לפיכך חובה עליהם לנהוג איש ברעהו ברוח של אחוה.", "Hindi" -> "सभी मनुष्यों को गौरव और अधिकारों के मामले में जन्मजात स्वतन्त्रता और समानता प्राप्त है । उन्हें बुद्धि और अन्तरात्मा की देन प्राप्त है और परस्पर उन्हें भाईचारे के भाव से बर्ताव करना चाहिए ।", "Japanese" -> "すべての人間は、生まれながらにして自由であり、かつ、尊厳と権利とについて平等である。人間は、理性と良心とを授けられており、互いに同胞の精神をもって行動しなければならない。", // "Kannada" -> "ಎಲ್ಲಾ ಮಾನವರೂ ಸ್ವತಂತ್ರರಾಗಿಯೇ ಜನಿಸಿದ್ಧಾರೆ. ಹಾಗೂ ಘನತೆ ಮತ್ತು ಹಕ್ಕುಗಳಲ್ಲಿ ಸಮಾನರಾಗಿದ್ದಾರೆ. ವಿವೇಕ ಮತ್ತು ಅಂತಃಕರಣಗಳನ್ನು ಪದೆದವರಾದ್ದ ರಿಂದ ಅವರು ಪರಸ್ಪರ ಸಹೋದರ ಭಾವದಿಂದ ವರ್ತಿಸಬೇಕು.", // "Khmer, Central" -> "មនុស្សទាំងអស់ កើតមកមានសេរីភាព និងសមភាព ក្នុងផ្នែកសេចក្ដីថ្លៃថ្នូរនិងសិទ្ធិ។ មនុស្ស មានវិចារណញ្ញាណនិងសតិសម្បជញ្ញៈជាប់ពីកំណើត ហើយគប្បីប្រព្រឹត្ដចំពោះគ្នាទៅវិញទៅមក ក្នុង ស្មារតីភាតរភាពជាបងប្អូន។", "Korean" -> "모든 인간은 태어날 때부터 자유로우며 그 존엄과 권리에 있어 동등하다. 인간은 천부적으로 이성과 양심을 부여받았으며 서로 형제애의 정신으로 행동하여야 한다.", // "Lao" -> "ມະນຸດເກີດມາມີສິດເສລີພາບ ແລະ ສະເໝີໜ້າກັນໃນທາງກຽດຕິສັກ ແລະ ທາງສິດດ້ວຍມະນຸດມີສະຕິສຳປັດຊັນຍະ(ຮູ້ດີຮູ້ຊົ່ວ)ແລະມີມະໂນທຳຈື່ງຕ້ອງປະພຶດຕົນຕໍ່ກັນໃນທາງພີ່ນ້ອງ.", // "Malayalam" -> "മനുഷ്യരെല്ലാവരും തുല്യാവകാശങ്ങളോടും അന്തസ്സോടും സ്വാതന്ത്ര്യത്തോടുംകൂടി ജനിച്ചിട്ടുള്ളവരാണ്‌. അന്യോന്യം ഭ്രാതൃഭാവത്തോടെ പെരുമാറുവാനാണ്‌ മനുഷ്യന്നു വിവേകബുദ്ധിയും മനസ്സാക്ഷിയും സിദ്ധമായിരിക്കുന്നത്‌.", // "Maldivian" -> "ހުރިހާ އިންސާނުންވެސް ދުނިޔެއަށް އުފަންވަނީ، މިނިވަންކަމުގައި، ހަމަހަމަ ޙައްޤުތަކަކާއެކު، ހަމަހަމަ ދަރަޖައެއްގައި ކަމޭހިތެވިގެންވާ ބައެއްގެ ގޮތުގައެވެ. ހެޔޮ ވިސްނުމާއި، ހެޔޮބުއްދީގެ ބާރު އެމީހުންނަށް ލިބިގެންވެއެވެ. އަދި އެކަކު އަނެކަކާމެދު އެމީހުން މުޢާމަލާތް ކުރަންވާނީ، އުޚުއްވަތްތެރިކަމުގެ ރޫޙެއްގައެވެ.", // "Nuosu" -> "ꊿꂷꃅꄿꐨꐥ,ꌅꅍꀂꏽꐯꒈꃅꐥꌐ。ꊿꊇꉪꍆꌋꆀꁨꉌꑌꐥ,ꄷꀋꁨꂛꊨꅫꃀꃅꐥꄡꑟ。", // "Panjabi, Eastern" -> "ਸਾਰਾ ਮਨੁੱਖੀ ਪਰਿਵਾਰ ਆਪਣੀ ਮਹਿਮਾ, ਸ਼ਾਨ ਅਤੇ ਹੱਕਾਂ ਦੇ ਪੱਖੋਂ ਜਨਮ ਤੋਂ ਹੀ ਆਜ਼ਾਦ ਹੈ ਅਤੇ ਸੁਤੇ ਸਿੱਧ ਸਾਰੇ ਲੋਕ ਬਰਾਬਰ ਹਨ । ਉਨ੍ਹਾਂ ਸਭਨਾ ਨੂੰ ਤਰਕ ਅਤੇ ਜ਼ਮੀਰ ਦੀ ਸੌਗਾਤ ਮਿਲੀ ਹੋਈ ਹੈ ਅਤੇ ਉਨ੍ਹਾਂ ਨੂੰ ਭਰਾਤਰੀਭਾਵ ਦੀ ਭਾਵਨਾ ਰਖਦਿਆਂ ਆਪਸ ਵਿਚ ਵਿਚਰਣਾ ਚਾਹੀਦਾ ਹੈ ।", "Russian" -> "Все люди рождаются свободными и равными в своем достоинстве и правах. Они наделены разумом и совестью и должны поступать в отношении друг друга в духе братства.", // "Sinhala" -> "සියලු මනුෂ්‍යයෝ නිදහස්ව උපත ලබා ඇත. ගරුත්වයෙන් හා අයිතිවාසිකම්වලින් සමාන වෙති. 
යුක්ති අයුක්ති පිළිබඳ හැඟීමෙන් හා හෘදය සාක්ෂියෙන් යුත් ඔවුන්, ඔවුනොවුන්ට සැළකිය යුත්තේ සහෝදරත්වය පිළිබඳ හැඟීමෙනි.", // "Tagalog (Tagalog)" -> "ᜀᜅ ᜎᜑᜆ᜔ ᜅ ᜆᜂᜌ᜔ ᜁᜐᜒᜈᜒᜎᜅ ᜈ ᜋᜎᜌ ᜀᜆ᜔ ᜉᜈ᜔ᜆᜌ᜔ ᜉᜈ᜔ᜆᜌ᜔ ᜐ ᜃᜇᜅᜎᜈ᜔ ᜀᜆ᜔ ᜋ᜔ᜄ ᜃᜇᜓᜉᜆᜈ᜔᜶ ᜐᜒᜎᜌ᜔ ᜉᜒᜈᜄ᜔ᜃᜎᜓᜊᜈ᜔ ᜅ ᜃᜆ᜔ᜏᜒᜇᜈ᜔ ᜀᜆ᜔ ᜊᜓᜇ᜔ᜑᜒ ᜀᜆ᜔ ᜇᜉᜆ᜔ ᜋᜄ᜔ᜉᜎᜄᜌᜈ᜔ ᜀᜅ ᜁᜐᜆ᜔ ᜁᜐ ᜐ ᜇᜒᜏ ᜅ ᜉᜄ᜔ᜃᜃᜉᜆᜒᜇᜈ᜔᜶", // "Tamazight, Standard Morocan" -> "ⴰⵔ ⴷ ⵜⵜⵍⴰⵍⴰⵏ ⵎⵉⴷⴷⵏ ⴳⴰⵏ ⵉⵍⴻⵍⵍⵉⵜⵏ ⵎⴳⴰⴷⴷⴰⵏ ⵖ ⵡⴰⴷⴷⵓⵔ ⴷ ⵉⵣⵔⴼⴰⵏ, ⵢⵉⵍⵉ ⴰⴽⵯ ⴷⴰⵔⵙⵏ ⵓⵏⵍⵍⵉ ⴷ ⵓⴼⵔⴰⴽ, ⵉⵍⵍⴰ ⴼⵍⵍⴰ ⵙⵏ ⴰⴷ ⵜⵜⵎⵢⴰⵡⴰⵙⵏ ⵏⴳⵔⴰⵜⵙⵏ ⵙ ⵜⴰⴳⵎⴰⵜ.", // "Tamil" -> "மனிதப் பிறிவியினர் சகலரும் சுதந்திரமாகவே பிறக்கின்றனர்; அவர்கள் மதிப்பிலும், உரிமைகளிலும் சமமானவர்கள், அவர்கள் நியாயத்தையும் மனச்சாட்சியையும் இயற்பண்பாகப் பெற்றவர்கள். அவர்கள் ஒருவருடனொருவர் சகோதர உணர்வுப் பாங்கில் நடந்துகொள்ளல் வேண்டும்.", // "Telugu" -> "ప్రతిపత్తిస్వత్వముల విషయమున మానవులెల్లరును జన్మతః స్వతంత్రులును సమానులును నగుదురు. వారు వివేచన-అంతఃకరణ సంపన్నులగుటచే పరస్పరము భ్రాతృభావముతో వర్తింపవలయును.", "Thai" -> "มนุษย์ทั้งหลายเกิดมามีอิสระและเสมอภาคกันในเกียรติศักด[เกียรติศักดิ์]และสิทธิ ต่างมีเหตุผลและมโนธรรม และควรปฏิบัติต่อกันด้วยเจตนารมณ์แห่งภราดรภาพ" // "Tibetan, Central" -> "འགྲོ་བ་མིའི་རིགས་རྒྱུད་ཡོངས་ལ་སྐྱེས་ཙམ་ཉིད་ནས་ཆེ་མཐོངས་དང༌། ཐོབ་ཐངགི་རང་དབང་འདྲ་མཉམ་དུ་ཡོད་ལ། ཁོང་ཚོར་རང་བྱུང་གི་བློ་རྩལ་དང་བསམ་ཚུལ་བཟང་པོ་འདོན་པའི་འོས་བབས་ཀྱང་ཡོད། དེ་བཞིན་ཕན་ཚུན་གཅིག་གིས་གཅིག་ལ་བུ་སྤུན་གྱི་འདུ་ཤེས་འཛིན་པའི་བྱ་སྤྱོད་ཀྱང་ལག་ལེན་བསྟར་དགོས་པ་ཡིན༎", // "Tigrigna" -> "ብመንፅር ክብርን መሰልን ኩሎም ሰባት እንትውለዱ ነፃን ማዕሪን እዮም፡፡ ምስትውዓልን ሕልናን ዝተዓደሎም ብምዃኖም ንሕድሕዶም ብሕውነታዊ መንፈስ ክተሓላለዩ ኦለዎም፡፡", // "Vai" -> "ꕉꕜꕮ ꔔꘋ ꖸ ꔰ ꗋꘋ ꕮꕨ ꔔꘋ ꖸ ꕎ ꕉꖸꕊ ꕴꖃ ꕃꔤꘂ ꗱ, ꕉꖷ ꗪꗡ ꔻꔤ ꗏꗒꗡ ꕎ ꗪ ꕉꖸꕊ ꖏꕎ. ꕉꕡ ꖏ ꗳꕮꕊ ꗏ ꕪ ꗓ ꕉꖷ ꕉꖸ ꕘꕞ ꗪ. ꖏꖷ ꕉꖸꔧ ꖏ ꖸ ꕚꕌꘂ ꗷꔤ ꕞ ꘃꖷ ꘉꔧ ꗠꖻ ꕞ ꖴꘋ ꔳꕩ ꕉꖸ ꗳ." ) val binaryStrings = Seq( "empty" -> "", "surrogate pair" -> "\\uD800\\uDC00", "swapped surrogates" -> "\\uDC00\\uD800", "single low surrogate" -> "\\uDC00", "single high surrogate" -> "\\uD800" ) for ((charsetName, charset) <- Charset.availableCharsets() if charset.canEncode) { describe (charsetName) { for ((n, text) <- testStrings ++ binaryStrings) { describe (s"the text [$n]") { it ("can be encoded into bytes using String.getBytes") { val bytes = text.getBytes(charset) bytes mustNot be (null) } def drain(encoder: CharsetEncoderByteIterator): Array[Byte] = { val buf = ArrayBuffer.empty[Byte] while (encoder.hasNext) { buf.append(encoder.next()) } buf.toArray } it ("can be encoded into bytes incrementally") { val encoder = new CharsetEncoderByteIterator(text, charset) drain(encoder) mustNot be (null) } it ("encoded incrementally yields the correct number of bytes") { val byteLength = text.getBytes(charset).length val encoder = new CharsetEncoderByteIterator(text, charset) var n = 0 while (encoder.hasNext) { encoder.nextByte() n += 1 } n mustEqual byteLength } it ("encoded incrementally yields the same results as String.getBytes") { val bytes = text.getBytes(charset) val encoder = new CharsetEncoderByteIterator(text, charset) val incBytes = drain(encoder) if (incBytes.toSeq != bytes.toSeq) { println(bytes.toSeq) println(incBytes.toSeq) } incBytes mustEqual bytes } } } } } } }
mjanicek/rembulan
rembulan-tests/src/test/scala/net/sandius/rembulan/util/CharsetEncoderByteIteratorSpec.scala
Scala
apache-2.0
18,625
object Test { def mkNumbers(x: Int): List[AnyRef] = { //Use explicit AnyRef to workaround known limitation of type inference with F-Bounds val base = List[AnyRef]( BigDecimal(x), BigInt(x), new java.lang.Double(x.toDouble), new java.lang.Float(x.toFloat), new java.lang.Long(x.toLong), new java.lang.Integer(x) ) val extras = List( if (x >= Short.MinValue && x <= Short.MaxValue) List(new java.lang.Short(x.toShort)) else Nil, if (x >= Byte.MinValue && x <= Byte.MaxValue) List(new java.lang.Byte(x.toByte)) else Nil, if (x >= Char.MinValue && x <= Char.MaxValue) List(new java.lang.Character(x.toChar)) else Nil ).flatten base ::: extras } def mkNumbers(x: BigInt): List[AnyRef] = { List( List(BigDecimal(x, java.math.MathContext.UNLIMITED)), List(x), if (x.isValidDouble) List(new java.lang.Double(x.toDouble)) else Nil, if (x.isValidFloat) List(new java.lang.Float(x.toFloat)) else Nil, if (x.isValidLong) List(new java.lang.Long(x.toLong)) else Nil, if (x.isValidInt) List(new java.lang.Integer(x.toInt)) else Nil, if (x.isValidShort) List(new java.lang.Short(x.toShort)) else Nil, if (x.isValidByte) List(new java.lang.Byte(x.toByte)) else Nil, if (x.isValidChar) List(new java.lang.Character(x.toChar)) else Nil ).flatten } // Don't necessarily expect BigDecimal created from BigInt to agree with Double here. def isIffy(x: Any, y: Any, canSwap: Boolean = true): Boolean = x match { case bd: BigDecimal => y match { case _: Float | _: Double => bd.toString.length > 15 case _ => false } case _ => canSwap && isIffy(y, x, false) } // Don't necessarily expect BigInt to agree with Float/Double beyond a Long def isIffyB(x: Any, y: Any, canSwap: Boolean = true): Boolean = x match { case bi: BigInt => y match { case _: Float | _: Double => bi < Long.MinValue || bi > Long.MaxValue case _ => false } case _ => canSwap && isIffyB(y, x, false) } def main(args: Array[String]): Unit = { val ints = (0 to 15).toList map (Short.MinValue >> _) val ints2 = ints map (x => -x) val ints3 = ints map (_ + 1) val ints4 = ints2 map (_ - 1) val setneg1 = ints map mkNumbers val setneg2 = ints3 map mkNumbers val setpos1 = ints2 map mkNumbers val setpos2 = ints4 map mkNumbers val zero = mkNumbers(0) val sets = setneg1 ++ setneg2 ++ List(zero) ++ setpos1 ++ setpos2 for (set <- sets ; x <- set ; y <- set) { assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass)) assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass)) } val bigInts = (0 to 1024).toList map (BigInt(-1) << _) val bigInts2 = bigInts map (x => -x) val bigInts3 = bigInts map (_ + 1) val bigInts4 = bigInts2 map (_ - 1) val setneg1b = bigInts map mkNumbers val setneg2b = bigInts3 map mkNumbers val setpos1b = bigInts2 map mkNumbers val setpos2b = bigInts4 map mkNumbers val sets2 = setneg1 ++ setneg1b ++ setneg2 ++ setneg2b ++ List(zero) ++ setpos1 ++ setpos1b ++ setpos2 ++ setpos2b for (set <- sets2 ; x <- set ; y <- set) { if (!isIffy(x,y)) { assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass)) // The following is blocked by SI-8150 // if (!isIffyB(x,y)) assert(x.## == y.##, "%x/%s != %x/%s from %s.## and %s.##".format(x.##, x.getClass, y.##, y.getClass, x, y)) } } } }
felixmulder/scala
test/files/run/numbereq.scala
Scala
bsd-3-clause
3,548
package com.netaporter.dynamomapper import com.netaporter.dynamomapper.DynamoMapper._ import org.scalatest.prop.PropertyChecks import org.scalatest.{FreeSpec, Matchers} /** * Checks that we can read and write the Scala Traversable traits, * such as Seq, List, and Set. * */ // todo - this is not yet a complete set of them. fix that. class TraversableReadWriteDefaultsSpec extends FreeSpec with Matchers with PropertyChecks { "converting fromDynamo and toDynamo work on" - { "Seq[T]" in { forAll("Seq") { (l: Seq[String]) => val m = map("l" -> l) fromDynamo(toDynamo(m)) shouldBe m m.attr[Seq[String]]("l") shouldBe DynamoReadSuccess(l) } } "List[T]" in { forAll("List") { (l: List[String]) => val m = map("l" -> l) fromDynamo(toDynamo(m)) shouldBe m m.attr[List[String]]("l") shouldBe DynamoReadSuccess(l) } } "Set[String]" in { forAll("Set") { (l: Set[String]) => val m = map("l" -> l) fromDynamo(toDynamo(m)) shouldBe m m.attr[Set[String]]("l") shouldBe DynamoReadSuccess(l) } } "Seq[Option[_]]" in { forAll("Seq[Option[String]]") { (l: Seq[Option[String]]) => val m = map("l" -> l) fromDynamo(toDynamo(m)) shouldBe m m.attr[Seq[Option[String]]]("l") shouldBe DynamoReadSuccess(l) } } } }
cjwebb/dynamo-mapper
src/test/scala/com/netaporter/dynamomapper/TraversableReadWriteDefaultsSpec.scala
Scala
apache-2.0
1,385
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package kafka.tools import java.util.Properties import joptsimple._ import kafka.utils.{CommandLineUtils, Exit, ToolsUtils} import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer} import org.apache.kafka.common.{PartitionInfo, TopicPartition} import org.apache.kafka.common.requests.ListOffsetRequest import org.apache.kafka.common.serialization.ByteArrayDeserializer import scala.jdk.CollectionConverters._ import scala.collection.Seq object GetOffsetShell { def main(args: Array[String]): Unit = { val parser = new OptionParser(false) val brokerListOpt = parser.accepts("broker-list", "REQUIRED: The list of hostname and port of the server to connect to.") .withRequiredArg .describedAs("hostname:port,...,hostname:port") .ofType(classOf[String]) val topicOpt = parser.accepts("topic", "REQUIRED: The topic to get offset from.") .withRequiredArg .describedAs("topic") .ofType(classOf[String]) val partitionOpt = parser.accepts("partitions", "comma separated list of partition ids. If not specified, it will find offsets for all partitions") .withRequiredArg .describedAs("partition ids") .ofType(classOf[String]) .defaultsTo("") val timeOpt = parser.accepts("time", "timestamp of the offsets before that. 
[Note: No offset is returned, if the timestamp greater than recently commited record timestamp is given.]") .withRequiredArg .describedAs("timestamp/-1(latest)/-2(earliest)") .ofType(classOf[java.lang.Long]) .defaultsTo(-1L) parser.accepts("offsets", "DEPRECATED AND IGNORED: number of offsets returned") .withRequiredArg .describedAs("count") .ofType(classOf[java.lang.Integer]) .defaultsTo(1) parser.accepts("max-wait-ms", "DEPRECATED AND IGNORED: The max amount of time each fetch request waits.") .withRequiredArg .describedAs("ms") .ofType(classOf[java.lang.Integer]) .defaultsTo(1000) if (args.length == 0) CommandLineUtils.printUsageAndDie(parser, "An interactive shell for getting topic offsets.") val options = parser.parse(args : _*) CommandLineUtils.checkRequiredArgs(parser, options, brokerListOpt, topicOpt) val clientId = "GetOffsetShell" val brokerList = options.valueOf(brokerListOpt) ToolsUtils.validatePortOrDie(parser, brokerList) val topic = options.valueOf(topicOpt) val partitionIdsRequested: Set[Int] = { val partitionsString = options.valueOf(partitionOpt) if (partitionsString.isEmpty) Set.empty else partitionsString.split(",").map { partitionString => try partitionString.toInt catch { case _: NumberFormatException => System.err.println(s"--partitions expects a comma separated list of numeric partition ids, but received: $partitionsString") Exit.exit(1) } }.toSet } val listOffsetsTimestamp = options.valueOf(timeOpt).longValue val config = new Properties config.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList) config.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, clientId) val consumer = new KafkaConsumer(config, new ByteArrayDeserializer, new ByteArrayDeserializer) val partitionInfos = listPartitionInfos(consumer, topic, partitionIdsRequested) match { case None => System.err.println(s"Topic $topic does not exist") Exit.exit(1) case Some(p) if p.isEmpty => if (partitionIdsRequested.isEmpty) System.err.println(s"Topic $topic has 0 partitions") else System.err.println(s"Topic $topic does not have any of the requested partitions ${partitionIdsRequested.mkString(",")}") Exit.exit(1) case Some(p) => p } if (partitionIdsRequested.nonEmpty) { (partitionIdsRequested -- partitionInfos.map(_.partition)).foreach { partitionId => System.err.println(s"Error: partition $partitionId does not exist") } } val topicPartitions = partitionInfos.sortBy(_.partition).flatMap { p => if (p.leader == null) { System.err.println(s"Error: partition ${p.partition} does not have a leader. Skip getting offsets") None } else Some(new TopicPartition(p.topic, p.partition)) } /* Note that the value of the map can be null */ val partitionOffsets: collection.Map[TopicPartition, java.lang.Long] = listOffsetsTimestamp match { case ListOffsetRequest.EARLIEST_TIMESTAMP => consumer.beginningOffsets(topicPartitions.asJava).asScala case ListOffsetRequest.LATEST_TIMESTAMP => consumer.endOffsets(topicPartitions.asJava).asScala case _ => val timestampsToSearch = topicPartitions.map(tp => tp -> (listOffsetsTimestamp: java.lang.Long)).toMap.asJava consumer.offsetsForTimes(timestampsToSearch).asScala.map { case (k, x) => if (x == null) (k, null) else (k, x.offset: java.lang.Long) } } partitionOffsets.toSeq.sortBy { case (tp, _) => tp.partition }.foreach { case (tp, offset) => println(s"$topic:${tp.partition}:${Option(offset).getOrElse("")}") } } /** * Return the partition infos for `topic`. If the topic does not exist, `None` is returned. 
*/ private def listPartitionInfos(consumer: KafkaConsumer[_, _], topic: String, partitionIds: Set[Int]): Option[Seq[PartitionInfo]] = { val partitionInfos = consumer.listTopics.asScala.filter { case (k, _) => k == topic }.values.flatMap(_.asScala).toBuffer if (partitionInfos.isEmpty) None else if (partitionIds.isEmpty) Some(partitionInfos) else Some(partitionInfos.filter(p => partitionIds.contains(p.partition))) } }
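// A hedged invocation sketch. The flags used here (--broker-list, --topic, --time) are the ones
// declared by the option parser above; the broker address and topic name are placeholders, and
// the call assumes kafka.tools.GetOffsetShell is on the classpath with a reachable broker.
// Passing -1 for --time asks for the latest offsets and -2 for the earliest, mirroring the
// help text above.
object GetOffsetShellExample {
  def main(args: Array[String]): Unit = {
    GetOffsetShell.main(Array(
      "--broker-list", "localhost:9092", // placeholder bootstrap broker
      "--topic", "my-topic",             // placeholder topic name
      "--time", "-1"                     // latest offset for every partition
    ))
  }
}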
sslavic/kafka
core/src/main/scala/kafka/tools/GetOffsetShell.scala
Scala
apache-2.0
7,023
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.orc import java.io.File import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag import org.scalatest.BeforeAndAfterAll import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.expressions.{Attribute, Predicate} import org.apache.spark.sql.catalyst.planning.PhysicalOperation import org.apache.spark.sql.execution.datasources.{DataSourceStrategy, FileBasedDataSourceTest} import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation import org.apache.spark.sql.execution.datasources.v2.orc.OrcTable import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.ORC_IMPLEMENTATION /** * OrcTest * -> OrcSuite * -> OrcSourceSuite * -> HiveOrcSourceSuite * -> OrcQueryTests * -> OrcQuerySuite * -> HiveOrcQuerySuite * -> OrcPartitionDiscoveryTest * -> OrcPartitionDiscoverySuite * -> HiveOrcPartitionDiscoverySuite * -> OrcFilterSuite * -> HiveOrcFilterSuite */ abstract class OrcTest extends QueryTest with FileBasedDataSourceTest with BeforeAndAfterAll { val orcImp: String = "native" private var originalConfORCImplementation = "native" override protected val dataSourceName: String = "orc" override protected val vectorizedReaderEnabledKey: String = SQLConf.ORC_VECTORIZED_READER_ENABLED.key protected override def beforeAll(): Unit = { super.beforeAll() originalConfORCImplementation = spark.conf.get(ORC_IMPLEMENTATION) spark.conf.set(ORC_IMPLEMENTATION.key, orcImp) } protected override def afterAll(): Unit = { spark.conf.set(ORC_IMPLEMENTATION.key, originalConfORCImplementation) super.afterAll() } /** * Writes `data` to a Orc file, which is then passed to `f` and will be deleted after `f` * returns. */ protected def withOrcFile[T <: Product: ClassTag: TypeTag] (data: Seq[T]) (f: String => Unit): Unit = withDataSourceFile(data)(f) /** * Writes `data` to a Orc file and reads it back as a `DataFrame`, * which is then passed to `f`. The Orc file will be deleted after `f` returns. */ protected def withOrcDataFrame[T <: Product: ClassTag: TypeTag] (data: Seq[T], testVectorized: Boolean = true) (f: DataFrame => Unit): Unit = withDataSourceDataFrame(data, testVectorized)(f) /** * Writes `data` to a Orc file, reads it back as a `DataFrame` and registers it as a * temporary table named `tableName`, then call `f`. The temporary table together with the * Orc file will be dropped/deleted after `f` returns. 
*/ protected def withOrcTable[T <: Product: ClassTag: TypeTag] (data: Seq[T], tableName: String, testVectorized: Boolean = true) (f: => Unit): Unit = withDataSourceTable(data, tableName, testVectorized)(f) protected def makeOrcFile[T <: Product: ClassTag: TypeTag]( data: Seq[T], path: File): Unit = makeDataSourceFile(data, path) protected def makeOrcFile[T <: Product: ClassTag: TypeTag]( df: DataFrame, path: File): Unit = makeDataSourceFile(df, path) protected def checkPredicatePushDown(df: DataFrame, numRows: Int, predicate: String): Unit = { withTempPath { file => // It needs to repartition data so that we can have several ORC files // in order to skip stripes in ORC. df.repartition(numRows).write.orc(file.getCanonicalPath) val actual = stripSparkFilter(spark.read.orc(file.getCanonicalPath).where(predicate)).count() assert(actual < numRows) } } protected def checkNoFilterPredicate (predicate: Predicate, noneSupported: Boolean = false) (implicit df: DataFrame): Unit = { val output = predicate.collect { case a: Attribute => a }.distinct val query = df .select(output.map(e => Column(e)): _*) .where(Column(predicate)) query.queryExecution.optimizedPlan match { case PhysicalOperation(_, filters, DataSourceV2Relation(orcTable: OrcTable, _, options)) => assert(filters.nonEmpty, "No filter is analyzed from the given query") val scanBuilder = orcTable.newScanBuilder(options) scanBuilder.pushFilters(filters.flatMap(DataSourceStrategy.translateFilter).toArray) val pushedFilters = scanBuilder.pushedFilters() if (noneSupported) { assert(pushedFilters.isEmpty, "Unsupported filters should not show in pushed filters") } else { assert(pushedFilters.nonEmpty, "No filter is pushed down") val maybeFilter = OrcFilters.createFilter(query.schema, pushedFilters) assert(maybeFilter.isEmpty, s"Couldn't generate filter predicate for $pushedFilters") } case _ => throw new AnalysisException("Can not match OrcTable in the query.") } } }
aosagie/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
Scala
apache-2.0
5,603
/* Copyright (C) 2008-2014 University of Massachusetts Amherst. This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible) http://factorie.cs.umass.edu, http://github.com/factorie Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cc.factorie.util object FastSorting { // NOTE: This sorts keeps the keys and values in correspondence, while sorting by the keys def quickSort(keys: Array[Int], values: Array[Int]): Unit = { // lo = pivot, hi = range def quickSortInner(keys: Array[Int], values: Array[Int], lo: Int, hi: Int): Unit = { if (lo < hi) { val part = partition(keys, values, lo, hi) quickSortInner(keys, values, lo, part) quickSortInner(keys, values, part + 1, hi) } } def partition(keys: Array[Int], values: Array[Int], lo: Int, hi: Int): Int = { val piv = (lo + hi) / 2 val x = keys(piv) var i = lo - 1 var j = hi + 1 var res = -1 while (res < 0) { i += 1 while (i < hi && keys(i) < x) i += 1 j -= 1 while (j > lo && keys(j) > x) j -= 1 if (i < j) swap(keys, values, i, j) else res = j } res } def swap(keys: Array[Int], values: Array[Int], i: Int, j: Int): Unit = { val keyTemp = keys(i) val valTemp = values(i) keys(i) = keys(j) values(i) = values(j) keys(j) = keyTemp values(j) = valTemp } quickSortInner(keys, values, 0, keys.length - 1) } }
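// A small usage sketch of the contract stated in the NOTE above: both arrays are permuted in
// lock-step, so values(i) still belongs to keys(i) after the sort. The arrays are throwaway
// example data, and the snippet assumes cc.factorie.util.FastSorting is in scope.
object FastSortingExample extends App {
  val keys = Array(3, 1, 2)
  val values = Array(30, 10, 20) // values(i) is paired with keys(i)
  FastSorting.quickSort(keys, values)
  println(keys.mkString(", "))   // 1, 2, 3
  println(values.mkString(", ")) // 10, 20, 30
}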
patverga/factorie
src/main/scala/cc/factorie/util/FastSorting.scala
Scala
apache-2.0
1,988
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whisk.core.containerpool.docker import scala.concurrent.Future import scala.concurrent.ExecutionContext import scala.util.Failure import whisk.common.TransactionId import scala.util.Success import whisk.common.LoggingMarkers import whisk.common.Logging import akka.event.Logging.ErrorLevel import whisk.core.containerpool.ContainerId /** * Serves as interface to the docker-runc CLI tool. * * Be cautious with the ExecutionContext passed to this, as the * calls to the CLI are blocking. * * You only need one instance (and you shouldn't get more). */ class RuncClient(executionContext: ExecutionContext)(implicit log: Logging) extends RuncApi with ProcessRunner { implicit private val ec = executionContext // Determines how to run docker-runc. Failure to find the docker-runc binary implies // a failure to initialize this instance of RuncClient. protected val runcCmd: Seq[String] = Seq("/usr/bin/docker-runc") def pause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] = runCmd("pause", id.asString).map(_ => ()) def resume(id: ContainerId)(implicit transid: TransactionId): Future[Unit] = runCmd("resume", id.asString).map(_ => ()) private def runCmd(args: String*)(implicit transid: TransactionId): Future[String] = { val cmd = runcCmd ++ args val start = transid.started(this, LoggingMarkers.INVOKER_RUNC_CMD(args.head), s"running ${cmd.mkString(" ")}") executeProcess(cmd: _*).andThen { case Success(_) => transid.finished(this, start) case Failure(t) => transid.failed(this, start, t.getMessage, ErrorLevel) } } } trait RuncApi { /** * Pauses the container with the given id. * * @param id the id of the container to pause * @return a Future completing according to the command's exit-code */ def pause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] /** * Unpauses the container with the given id. * * @param id the id of the container to unpause * @return a Future completing according to the command's exit-code */ def resume(id: ContainerId)(implicit transid: TransactionId): Future[Unit] }
paulcastro/openwhisk
core/invoker/src/main/scala/whisk/core/containerpool/docker/RuncClient.scala
Scala
apache-2.0
2,932
package ca.pgx.common.db.entities import ca.pgx.common.db.collections.CollectionNames import ca.pgx.common.db.helpers.{MongoEnumListField, InjectableMetaRecord} import ca.pgx.common.events.EventAction.EventAction import ca.pgx.common.events.{EventAction, EventType} import com.foursquare.index.IndexedRecord import net.liftweb.mongodb.record.MongoRecord import net.liftweb.mongodb.record.field.{DateField, ObjectIdPk, ObjectIdRefField} import net.liftweb.record.field.{EnumNameField, BooleanField, EnumField, StringField} /** * A Log of all events handled by the service with processed status. * * If you want this collection to be capped you can create it beforehand - before the application * runs for the first time. */ class EventLog extends MongoRecord[EventLog] with ObjectIdPk[EventLog] with IndexedRecord[EventLog] { override def meta = EventLog /** * Id of a user who submitted the reading. */ object userId extends ObjectIdRefField(this, User) /** * Project to which this entry belongs. */ object projectId extends ObjectIdRefField(this, Project) /** * Type of an event registered. */ object event extends EnumNameField(this, EventType) /** * When this event was registered. */ object when extends DateField(this) /** * A boolean that takes the value of true if an alert was raised. This flag is used for alerts * only and not notifications. For example if backup validation fails we send an alert by email, * while on successful backup if users are interested we can send a notification email. * Any failed validation should raise an alert because otherwise there is no point in monitoring it. */ object alertRaised extends BooleanField(this) /** * A list of actions taken in the order of processing. */ object actionsTaken extends MongoEnumListField[EventLog, EventAction.type](this, EventAction) /** * Optional comment written by Processors. Might contain commands executed, human readable text, etc. */ object comment extends StringField(this, 2048) } object EventLog extends EventLog with InjectableMetaRecord[EventLog] { override def collectionName = CollectionNames.EVENT_LOG.toString }
pgxcentre/eventhub
common/src/main/scala/ca/pgx/common/db/entities/EventLog.scala
Scala
apache-2.0
2,204
package com.gx.factorymethod /** * Copyright 2017 josephguan * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ // Client trait Chef { def makeNoodle(): Noodle def cook(): Unit = { val noodle = makeNoodle() println(s"The noodle is ${noodle.flavor()}. Yummy!") } } class ThaiChef extends Chef { override def makeNoodle(): Noodle = new PadThai() } class ItalianChef extends Chef { override def makeNoodle(): Noodle = new Spaghetti() }
josephguan/scala-design-patterns
creational/factory-method/src/main/scala/com/gx/factorymethod/Chef.scala
Scala
apache-2.0
982
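A hypothetical usage sketch for the factory method above. Noodle, PadThai and Spaghetti live in separate files of the original repository; the minimal stand-ins below (including the flavor strings) are assumptions made only to keep the example self-contained.

trait Noodle {
  def flavor(): String
}

class PadThai extends Noodle {
  override def flavor(): String = "sweet and sour"
}

class Spaghetti extends Noodle {
  override def flavor(): String = "rich and savory"
}

object ChefDemo extends App {
  // Each concrete Chef decides which Noodle to instantiate; cook() relies only on the factory method.
  val chefs: Seq[Chef] = Seq(new ThaiChef, new ItalianChef)
  chefs.foreach(_.cook())
  // Prints something like:
  //   The noodle is sweet and sour. Yummy!
  //   The noodle is rich and savory. Yummy!
}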
/* * @author Robin Hafen * * Copyright 2013 University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.signalcollect.dcop.evaluation.candidates /* * Utility functions that may be shared across multiple algorithms */ object Util { /* * Get the maximal values that are equal to each other. */ def maxValuesBy[T : Ordering](elems: Iterable[T]): Iterable[T] = { def maxValues(xss: List[T], currentMax: List[T]): List[T] = xss match { case Nil => currentMax case (x::xs) => currentMax match { case Nil => maxValues(xs, List(x)) // No current max, take the first element as the new max case maxs@(max::_) => { val cmp = implicitly[Ordering[T]].compare(x, max) if (cmp < 0) { maxValues(xs, maxs) } else if (cmp == 0) { maxValues(xs, x::maxs) } else { maxValues(xs, List(x)) } } } } maxValues(elems.toList, List()) } /* Generate the cartesian product of multiple sets (represented as traversables). * Similar to * A x B x C = { (a1, b1, c1), ..., (an, bn, cn) } * but with Seqs instead of Sets */ def cartesian[A](xs: Traversable[Traversable[A]]): Seq[Seq[A]] = { xs.foldLeft(Seq(Seq.empty[A])){ (x, y) => for (a <- x.view; b <- y) yield a :+ b } } }
gmazlami/dcop-maxsum
src/main/scala/com/signalcollect/dcop/evaluation/candidates/Util.scala
Scala
apache-2.0
1,884
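A small usage sketch (not part of the original file) that makes the behaviour of the two helpers above concrete; the inputs are invented and the import assumes the file is compiled under its declared package.

object UtilDemo extends App {
  import com.signalcollect.dcop.evaluation.candidates.Util

  // All elements tied for the maximum are returned, not just one of them.
  println(Util.maxValuesBy(List(1, 3, 2, 3))) // List(3, 3)

  // Cartesian product of several collections, as Seqs instead of Sets.
  // The result may be backed by a lazy view, so force it before printing.
  println(Util.cartesian(List(List(1, 2), List(3, 4))).map(_.toList).toList)
  // List(List(1, 3), List(1, 4), List(2, 3), List(2, 4))
}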
package org.nefilim.influxdb import java.net.InetSocketAddress import java.text.DecimalFormat import java.util.concurrent.TimeUnit import akka.actor.{Actor, ActorLogging, Props} import com.codahale.metrics.{Meter, MetricRegistry, Timer} import scala.collection.JavaConverters._ import scala.concurrent.duration._ import scala.language.implicitConversions /** * Created by peter on 11/16/14. */ case class InfluxEntry(name: String, time_precision: String = "ms", columns: Seq[String], points: Option[List[List[BigDecimal]]]) object Reporter { val MilliSeconds = "ms" object Columns { val TimerColumns = List( "time", "tcount", "tmin", "tmax", "tmean", "std_dev", "50th_percentile", "75th_percentile", "95th_percentile", "99th_percentile", "999th_percentile", "one_minute", "five_minute", "fifteen_minute", "mean_rate" ) val MeterColumns = List( "time", "mcount", "one_minute", "five_minute", "fifteen_minute", "mean_rate", "mcount_delta" ) // make configurable? val RateFactor = TimeUnit.SECONDS.toSeconds(1) val DurationFactor = 1.0 / TimeUnit.MILLISECONDS.toNanos(1) def convertDuration(duration: Double): Double = duration * DurationFactor def convertRate(rate: Double): Double = rate * RateFactor } // NOT THREAD SAFE should only be accessed from the actor // TODO safer to move this inside the actor? makes for ugly interface private var previousMeterCount = Map.empty[String, Long] // no snapshot available :( private[influxdb] object Conversion { import org.nefilim.influxdb.Reporter.Columns._ import spray.json._ object InfluxJsonProtocol extends DefaultJsonProtocol { implicit val influxEntryFormat = jsonFormat4(InfluxEntry) } import org.nefilim.influxdb.Reporter.Conversion.InfluxJsonProtocol._ def round(d: Double)(implicit formatter: DecimalFormat): BigDecimal = BigDecimal(formatter.format(d)) implicit def long2BigDecimal(l: Long) = BigDecimal(l) def timer2Json(name: String, timer: Timer, timestamp: Long)(implicit formatter: DecimalFormat): String = { val snapshot = timer.getSnapshot val point = List(List[BigDecimal]( timestamp, snapshot.size, round(convertDuration(snapshot.getMin)), round(convertDuration(snapshot.getMax)), round(convertDuration(snapshot.getMean)), round(convertDuration(snapshot.getStdDev)), round(convertDuration(snapshot.getMedian)), round(convertDuration(snapshot.get75thPercentile())), round(convertDuration(snapshot.get95thPercentile())), round(convertDuration(snapshot.get99thPercentile())), round(convertDuration(snapshot.get999thPercentile())), round(convertRate(timer.getOneMinuteRate)), round(convertRate(timer.getFiveMinuteRate)), round(convertRate(timer.getFifteenMinuteRate)), round(convertRate(timer.getMeanRate)) )) assert (TimerColumns.length == point(0).size) s"[${InfluxEntry(name, MilliSeconds, TimerColumns, Some(point)).toJson.compactPrint}]" } def meter2Json(name: String, meter: Meter, timestamp: Long)(implicit formatter: DecimalFormat): String = { val currentCount = meter.getCount val point = List(List[BigDecimal]( timestamp, currentCount, round(convertRate(meter.getOneMinuteRate)), round(convertRate(meter.getFiveMinuteRate)), round(convertRate(meter.getFifteenMinuteRate)), round(convertRate(meter.getMeanRate)), currentCount - previousMeterCount.get(name).getOrElse(0L) )) assert (MeterColumns.length == point(0).size) previousMeterCount = previousMeterCount + (name -> currentCount) s"[${InfluxEntry(name, MilliSeconds, MeterColumns, Some(point)).toJson.compactPrint}]" } } case object Report def props(registry: MetricRegistry, influxDBHost: String, influxDBPort: Int = 4444, 
reportingInterval: FiniteDuration = 60.seconds) = Props(classOf[Reporter], reportingInterval, registry, influxDBHost, influxDBPort) } import org.nefilim.influxdb.Reporter.Conversion._ import org.nefilim.influxdb.Reporter.Report class Reporter(reportingInterval: FiniteDuration, registry: MetricRegistry, influxDBHost: String, influxDBPort: Int) extends Actor with ActorLogging { import context.dispatcher log.info("starting InfluxDB Reporter with host {}:{} at interval {}", influxDBHost, influxDBPort, reportingInterval) context.system.scheduler.schedule(reportingInterval, reportingInterval, self, Report) val udpSender = context.actorOf(Props(classOf[UDPSender], new InetSocketAddress(influxDBHost, influxDBPort))) // not thread safe implicit val formatter = new DecimalFormat("#.##") def receive: Receive = { case Report => log.debug("waking up to report") // on the fence about synchronizing timestamps, it seems less accurate ultimately but probably useful to influx/grafana val timestamp = System.currentTimeMillis()/1000 val meters = registry.getMeters meters.keySet.asScala.foreach { k => udpSender ! meter2Json(k, meters.get(k), timestamp) } val timers = registry.getTimers timers.keySet.asScala.foreach { k => udpSender ! timer2Json(k, timers.get(k), timestamp) } } }
nefilim/ScalaInfluxDBReporter
src/main/scala/org/nefilim/influxdb/Reporter.scala
Scala
mit
5,433
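A hypothetical wiring sketch (not part of the original sources) showing how the Reporter actor above could be started against a Dropwizard MetricRegistry. It assumes this library, Akka and metrics-core are on the classpath; the host, port and metric names are invented.

import akka.actor.ActorSystem
import com.codahale.metrics.MetricRegistry
import org.nefilim.influxdb.Reporter

import scala.concurrent.duration._

object ReporterWiring extends App {
  val system = ActorSystem("metrics-demo")
  val registry = new MetricRegistry()

  // Every reportingInterval the actor snapshots all meters and timers in the registry
  // and ships them as JSON to InfluxDB's UDP listener.
  system.actorOf(Reporter.props(registry, "influxdb.example.com", 4444, reportingInterval = 30.seconds))

  // Application code records metrics as usual; the reporter picks them up on its schedule.
  registry.meter("requests").mark()
  val timing = registry.timer("latency").time()
  Thread.sleep(5)
  timing.stop()
}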
package org.jetbrains.plugins.scala.lang.psi.stubs import com.intellij.psi.stubs.NamedStub import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern /** * User: Alexander Podkhalyuzin * Date: 17.07.2009 */ trait ScReferencePatternStub extends NamedStub[ScReferencePattern]
gtache/intellij-lsp
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/stubs/ScReferencePatternStub.scala
Scala
apache-2.0
303
package scala.test import org.scalatest.FunSpec class TestFilterTestA extends FunSpec { describe("A") { it ("tests a") { assert(true) } } }
sdtwigg/rules_scala
test/TestFilterTestA.scala
Scala
apache-2.0
160
package com.itszuvalex.itszulib.network.messages import com.itszuvalex.itszulib.core.traits.tile.{TileMultiFluidTank, TileFluidTank} import cpw.mods.fml.common.network.simpleimpl.{IMessage, IMessageHandler, MessageContext} import io.netty.buffer.ByteBuf import net.minecraft.client.Minecraft import net.minecraftforge.fluids.{FluidRegistry, FluidStack} /** * Created by Alex on 11.10.2015. */ class MessageFluidTankUpdate(var x: Int, var y: Int, var z: Int, var tankID: Int, var fluidID: Int, var amount: Int) extends IMessage with IMessageHandler[MessageFluidTankUpdate, IMessage] { def this() = this(0, 0, 0, -1, -1, -1) def this(_x: Int, _y: Int, _z: Int, fID: Int, amt: Int) = this (_x, _y, _z, -1, fID, amt) override def toBytes(buf: ByteBuf): Unit = { buf.writeInt(x) buf.writeShort(y) buf.writeInt(z) buf.writeInt(tankID) buf.writeInt(fluidID) buf.writeInt(amount) } override def fromBytes(buf: ByteBuf): Unit = { x = buf.readInt() y = buf.readShort() z = buf.readInt() tankID = buf.readInt() fluidID = buf.readInt() amount = buf.readInt() } override def onMessage(message: MessageFluidTankUpdate, ctx: MessageContext): IMessage = { val world = Minecraft.getMinecraft.theWorld world.getTileEntity(message.x, message.y, message.z) match { case tank: TileFluidTank => tank.tank.setFluid(new FluidStack(FluidRegistry.getFluid(message.fluidID), message.amount)) case tank: TileMultiFluidTank => tank.tanks(message.tankID).setFluid(if (message.fluidID == -1) null else new FluidStack(FluidRegistry.getFluid(message.fluidID), message.amount)) case _ => } null } }
BlockWorker/ItszuLib
src/main/scala/com/itszuvalex/itszulib/network/messages/MessageFluidTankUpdate.scala
Scala
gpl-2.0
1,686
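A hypothetical registration sketch (channel name and discriminator are invented). In a Forge 1.7.10 mod, the message class above is typically registered on a SimpleNetworkWrapper and sent from the server, so that onMessage runs on the client and updates the rendered tank contents.

import com.itszuvalex.itszulib.network.messages.MessageFluidTankUpdate
import cpw.mods.fml.common.network.NetworkRegistry
import cpw.mods.fml.relauncher.Side

object TankNetworkSketch {
  val channel = NetworkRegistry.INSTANCE.newSimpleChannel("ItszuLibSketch")

  def init(): Unit =
    // The message class acts as its own handler, so it is passed twice.
    channel.registerMessage(classOf[MessageFluidTankUpdate], classOf[MessageFluidTankUpdate], 0, Side.CLIENT)

  // Server side: broadcast the new fluid contents of tank 0 at (x, y, z) to all players.
  def syncTank(x: Int, y: Int, z: Int, fluidId: Int, amount: Int): Unit =
    channel.sendToAll(new MessageFluidTankUpdate(x, y, z, 0, fluidId, amount))
}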
package geostat.lattice

import scala.math._

import geostat.MapPoint
import geostat.MapPointSet

/**
 * Random lattice.
 *
 * @param npts number of points uniformly distributed on the sphere's surface
 */
class RandomLattice(npts: Int = 1) extends Lattice {

  require(npts > 0)

  val vertex = generateVertexSet()

  private def generateVertexSet() = {
    val set = new MapPointSet

    for (i <- 0 to npts - 1) {
      val u = random
      val v = random
      set.add(new MapPoint(acos(2.0 * v - 1.0).toDegrees - 90.0, (2.0 * Pi * u).toDegrees - 180.0))
    }

    set
  }

}
alessandroadamo/geostat
src/main/scala/geostat/lattice/RandomLattice.scala
Scala
lgpl-3.0
592
/* * Copyright 2015 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tour import scala.collection.immutable.IndexedSeq import org.mongodb.scala._ import org.mongodb.scala.model.Aggregates._ import org.mongodb.scala.model.Filters._ import org.mongodb.scala.model.Projections._ import org.mongodb.scala.model.Sorts._ import org.mongodb.scala.model.Updates._ import org.mongodb.scala.model._ import tour.Helpers._ /** * The QuickTour code example see: https://mongodb.github.io/mongo-scala-driver/2.0/getting-started */ object QuickTour { //scalastyle:off method.length /** * Run this main method to see the output of this quick example. * * @param args takes an optional single argument for the connection string * @throws Throwable if an operation fails */ def main(args: Array[String]): Unit = { val mongoClient: MongoClient = if (args.isEmpty) MongoClient() else MongoClient(args.head) // get handle to "mydb" database val database: MongoDatabase = mongoClient.getDatabase("mydb") // get a handle to the "test" collection val collection: MongoCollection[Document] = database.getCollection("test") collection.drop().results() // make a document and insert it val doc: Document = Document("_id" -> 0, "name" -> "MongoDB", "type" -> "database", "count" -> 1, "info" -> Document("x" -> 203, "y" -> 102)) collection.insertOne(doc).results() // get it (since it's the only one in there since we dropped the rest earlier on) collection.find.first().printResults() // now, lets add lots of little documents to the collection so we can explore queries and cursors val documents: IndexedSeq[Document] = (1 to 100) map { i: Int => Document("i" -> i) } val insertObservable = collection.insertMany(documents) val insertAndCount = for { insertResult <- insertObservable countResult <- collection.count() } yield countResult println(s"total # of documents after inserting 100 small ones (should be 101): ${insertAndCount.headResult()}") collection.find().first().printHeadResult() // Query Filters // now use a query to get 1 document out collection.find(equal("i", 71)).first().printHeadResult() // now use a range query to get a larger subset collection.find(gt("i", 50)).printResults() // range query with multiple constraints collection.find(and(gt("i", 50), lte("i", 100))).printResults() // Sorting collection.find(exists("i")).sort(descending("i")).first().printHeadResult() // Projection collection.find().projection(excludeId()).first().printHeadResult() //Aggregation collection.aggregate(Seq( filter(gt("i", 0)), project(Document("""{ITimes10: {$multiply: ["$i", 10]}}""")) )).printResults() // Update One collection.updateOne(equal("i", 10), set("i", 110)).printHeadResult("Update Result: ") // Update Many collection.updateMany(lt("i", 100), inc("i", 100)).printHeadResult("Update Result: ") // Delete One collection.deleteOne(equal("i", 110)).printHeadResult("Delete Result: ") // Delete Many collection.deleteMany(gte("i", 100)).printHeadResult("Delete Result: ") collection.drop().results() // ordered bulk writes val writes: 
List[WriteModel[_ <: Document]] = List( InsertOneModel(Document("_id" -> 4)), InsertOneModel(Document("_id" -> 5)), InsertOneModel(Document("_id" -> 6)), UpdateOneModel(Document("_id" -> 1), set("x", 2)), DeleteOneModel(Document("_id" -> 2)), ReplaceOneModel(Document("_id" -> 3), Document("_id" -> 3, "x" -> 4)) ) collection.bulkWrite(writes).printHeadResult("Bulk write results: ") collection.drop().results() collection.bulkWrite(writes, BulkWriteOptions().ordered(false)).printHeadResult("Bulk write results (unordered): ") collection.find().printResults("Documents in collection: ") // Clean up collection.drop().results() // release resources mongoClient.close() } }
jCalamari/mongo-scala-driver
examples/src/test/scala/tour/QuickTour.scala
Scala
apache-2.0
4,572
package org.retistruen trait Collector[T] extends Receiver[T] with CachingEmitter[Seq[Datum[T]]] with Reset { private var buf: Seq[Datum[T]] = Seq.empty /** Returns the collected data in the internal buffer */ protected def buffer = buf /** Clears the collected data from the internal buffer */ protected def clear = buf = Seq.empty def receive(emitter: Emitter[T], datum: Datum[T]) = buf :+= datum override def reset { super.reset clear } }
plalloni/retistruen
src/main/scala/org/retistruen/Collector.scala
Scala
mit
489
package core

import _root_.builder.OriginalValidator
import lib.ServiceConfiguration
import com.bryzek.apidoc.api.v0.models.Original
import com.bryzek.apidoc.api.v0.models.OriginalType.SwaggerJson
import org.scalatest.{FunSpec, Matchers}

class OriginalValidatorSpec extends FunSpec with Matchers {

  private def readFile(path: String): String = {
    scala.io.Source.fromFile(path).getLines.mkString("\n")
  }

  val config = ServiceConfiguration(
    orgKey = "apidoc",
    orgNamespace = "me.apidoc",
    version = "0.0.2-dev"
  )

  describe("OriginalValidator") {

    it("should validate valid swagger json with parameter of type array") {
      val filename = "simple-w-array.json"
      val path = s"core/src/test/resources/$filename"
      val result = OriginalValidator(
        config,
        original = Original(SwaggerJson, readFile(path)),
        new MockServiceFetcher()
      ).validate
      result.isRight should be(true)
    }

    it("should validate valid swagger json without parameter of type array") {
      val filename = "simple-without-array.json"
      val path = s"core/src/test/resources/$filename"
      val result = OriginalValidator(
        config,
        original = Original(SwaggerJson, readFile(path)),
        new MockServiceFetcher()
      ).validate
      result.isRight should be(true)
    }
  }
}
Seanstoppable/apidoc
core/src/test/scala/core/OriginalValidatorSpec.scala
Scala
mit
1,455
package org.barelyfunctional.tools.scalafs import org.scalatest.{MustMatchers, FunSuite} class DirectoryTest extends FunSuite with MustMatchers { test("can list files in a directory") { Directory("src/test/data").list.map(_.name) must be (List("file1.txt", "file2.txt")) } }
BarelyFunctional/ScalaFS
src/test/scala/org/barelyfunctional/tools/scalafs/DirectoryTest.scala
Scala
epl-1.0
287
package sttp.client.circe

import io.circe.jawn.decode
import io.circe.{Decoder, Encoder, Printer}
import sttp.client.{IsOption, ResponseAs, ResponseError, _}
import sttp.client.internal.Utf8
import sttp.model.MediaType

// Taken from sttp-circe 2.2.9 and upgraded to circe 0.14. TODO: remove after upgrade to sttp3
trait SttpCirceApi {

  implicit def circeBodySerializer[B](implicit
      encoder: Encoder[B],
      printer: Printer = Printer.noSpaces
  ): BodySerializer[B] =
    b => StringBody(encoder(b).printWith(printer), Utf8, Some(MediaType.ApplicationJson))

  /**
    * If the response is successful (2xx), tries to deserialize the body from a string into JSON. Returns:
    *   - `Right(b)` if the parsing was successful
    *   - `Left(HttpError(String))` if the response code was other than 2xx (deserialization is not attempted)
    *   - `Left(DeserializationError)` if there's an error during deserialization
    */
  def asJson[B: Decoder: IsOption]: ResponseAs[Either[ResponseError[io.circe.Error], B], Nothing] =
    asString.mapWithMetadata(ResponseAs.deserializeRightWithError(deserializeJson))

  /**
    * Tries to deserialize the body from a string into JSON, regardless of the response code. Returns:
    *   - `Right(b)` if the parsing was successful
    *   - `Left(DeserializationError)` if there's an error during deserialization
    */
  def asJsonAlways[B: Decoder: IsOption]: ResponseAs[Either[DeserializationError[io.circe.Error], B], Nothing] =
    asStringAlways.map(ResponseAs.deserializeWithError(deserializeJson))

  /**
    * Tries to deserialize the body from a string into JSON, regardless of the response code. Returns the parse
    * result, or throws an exception if there's an error during deserialization.
    */
  def asJsonAlwaysUnsafe[B: Decoder: IsOption]: ResponseAs[B, Nothing] =
    asStringAlways.map(ResponseAs.deserializeOrThrow(deserializeJson))

  def deserializeJson[B: Decoder: IsOption]: String => Either[io.circe.Error, B] =
    JsonInput.sanitize[B].andThen(decode[B])
}
TouK/nussknacker
utils/http-utils/src/main/scala/sttp/client/circe/SttpCirceApi.scala
Scala
apache-2.0
2,025
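A hypothetical usage sketch, assuming sttp client 2.x and circe-generic are on the classpath. The trait is mixed into a local object here (the upstream sttp-circe module exposes it through a package object instead); the URL and the Account case class are invented.

import io.circe.generic.auto._
import sttp.client._
import sttp.client.circe.SttpCirceApi

object circeSupport extends SttpCirceApi
import circeSupport._

object AsJsonSketch extends App {
  case class Account(id: Long, name: String)

  implicit val backend = HttpURLConnectionBackend()

  val response = basicRequest
    .get(uri"https://api.example.com/accounts/42")
    .response(asJson[Account])
    .send()

  // Left(HttpError(...)) for non-2xx responses, Left(DeserializationError(...)) for unparsable JSON.
  response.body match {
    case Right(account) => println(s"fetched $account")
    case Left(error)    => println(s"request failed: $error")
  }
}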
package mesosphere.marathon.api.v2.json

import java.lang.{ Double => JDouble, Integer => JInt }

import mesosphere.marathon.Protos.Constraint
import mesosphere.marathon.api.validation.FieldConstraints._
import mesosphere.marathon.api.validation.{ PortIndices, ValidV2AppDefinition }
import mesosphere.marathon.health.{ HealthCheck, HealthCounts }
import mesosphere.marathon.state.AppDefinition.VersionInfo.FullVersionInfo
import mesosphere.marathon.state._
import org.apache.mesos.{ Protos => mesos }

import scala.collection.immutable.Seq
import scala.concurrent.duration._

@PortIndices
@ValidV2AppDefinition
case class V2AppDefinition(
    id: PathId = AppDefinition.DefaultId,
    cmd: Option[String] = AppDefinition.DefaultCmd,
    args: Option[Seq[String]] = AppDefinition.DefaultArgs,
    user: Option[String] = AppDefinition.DefaultUser,
    env: Map[String, String] = AppDefinition.DefaultEnv,
    @FieldMin(0) instances: JInt = AppDefinition.DefaultInstances,
    cpus: JDouble = AppDefinition.DefaultCpus,
    mem: JDouble = AppDefinition.DefaultMem,
    disk: JDouble = AppDefinition.DefaultDisk,
    @FieldPattern(regexp = "^(//cmd)|(/?[^/]+(/[^/]+)*)|$") executor: String = AppDefinition.DefaultExecutor,
    constraints: Set[Constraint] = AppDefinition.DefaultConstraints,
    uris: Seq[String] = AppDefinition.DefaultUris,
    storeUrls: Seq[String] = AppDefinition.DefaultStoreUrls,
    @FieldPortsArray ports: Seq[JInt] = AppDefinition.DefaultPorts,
    requirePorts: Boolean = AppDefinition.DefaultRequirePorts,
    backoff: FiniteDuration = AppDefinition.DefaultBackoff,
    backoffFactor: JDouble = AppDefinition.DefaultBackoffFactor,
    maxLaunchDelay: FiniteDuration = AppDefinition.DefaultMaxLaunchDelay,
    container: Option[Container] = AppDefinition.DefaultContainer,
    healthChecks: Set[HealthCheck] = AppDefinition.DefaultHealthChecks,
    dependencies: Set[PathId] = AppDefinition.DefaultDependencies,
    upgradeStrategy: UpgradeStrategy = AppDefinition.DefaultUpgradeStrategy,
    labels: Map[String, String] = AppDefinition.DefaultLabels,
    acceptedResourceRoles: Option[Set[String]] = None,
    version: Timestamp = Timestamp.now(),
    versionInfo: Option[V2AppDefinition.VersionInfo] = None) extends Timestamped {

  assert(
    portIndicesAreValid(),
    "Health check port indices must address an element of the ports array or container port mappings."
  )

  /**
   * Returns true if all health check port index values are in the range
   * of this app's ports array, or if defined, the array of container
   * port mappings.
   */
  def portIndicesAreValid(): Boolean =
    this.toAppDefinition.portIndicesAreValid()

  /**
   * Returns the canonical internal representation of this API-specific
   * application definition.
*/ def toAppDefinition: AppDefinition = { val appVersionInfo = versionInfo match { case Some(V2AppDefinition.VersionInfo(lastScalingAt, lastChangeAt)) => AppDefinition.VersionInfo.FullVersionInfo(version, lastScalingAt, lastChangeAt) case None => AppDefinition.VersionInfo.OnlyVersion(version) } AppDefinition( id = id, cmd = cmd, args = args, user = user, env = env, instances = instances, cpus = cpus, mem = mem, disk = disk, executor = executor, constraints = constraints, uris = uris, storeUrls = storeUrls, ports = ports, requirePorts = requirePorts, backoff = backoff, backoffFactor = backoffFactor, maxLaunchDelay = maxLaunchDelay, container = container, healthChecks = healthChecks, dependencies = dependencies, upgradeStrategy = upgradeStrategy, labels = labels, acceptedResourceRoles = acceptedResourceRoles, versionInfo = appVersionInfo ) } def withCanonizedIds(base: PathId = PathId.empty): V2AppDefinition = { val baseId = id.canonicalPath(base) copy(id = baseId, dependencies = dependencies.map(_.canonicalPath(baseId))) } } object V2AppDefinition { case class VersionInfo( lastScalingAt: Timestamp, lastConfigChangeAt: Timestamp) def apply(app: AppDefinition): V2AppDefinition = { val maybeVersionInfo = app.versionInfo match { case FullVersionInfo(_, lastScalingAt, lastConfigChangeAt) => Some(VersionInfo(lastScalingAt, lastConfigChangeAt)) case _ => None } V2AppDefinition( id = app.id, cmd = app.cmd, args = app.args, user = app.user, env = app.env, instances = app.instances, cpus = app.cpus, mem = app.mem, disk = app.disk, executor = app.executor, constraints = app.constraints, uris = app.uris, storeUrls = app.storeUrls, ports = app.ports, requirePorts = app.requirePorts, backoff = app.backoff, backoffFactor = app.backoffFactor, maxLaunchDelay = app.maxLaunchDelay, container = app.container, healthChecks = app.healthChecks, dependencies = app.dependencies, upgradeStrategy = app.upgradeStrategy, labels = app.labels, acceptedResourceRoles = app.acceptedResourceRoles, version = app.version, versionInfo = maybeVersionInfo) } }
cgvarela/marathon
src/main/scala/mesosphere/marathon/api/v2/json/V2AppDefinition.scala
Scala
apache-2.0
5,048
package adt.bson.mongo.async.client object JavaAsyncClientModels extends JavaAsyncClientModels trait JavaAsyncClientModels { type JavaAggregateIterable[T] = com.mongodb.async.client.AggregateIterable[T] type JavaAsyncBatchCursor[T] = com.mongodb.async.AsyncBatchCursor[T] type JavaDistinctIterable[T] = com.mongodb.async.client.DistinctIterable[T] type JavaFindIterable[T] = com.mongodb.async.client.FindIterable[T] type JavaListCollectionsIterable[T] = com.mongodb.async.client.ListCollectionsIterable[T] type JavaListDatabasesIterable[T] = com.mongodb.async.client.ListDatabasesIterable[T] type JavaListIndexesIterable[T] = com.mongodb.async.client.ListIndexesIterable[T] type JavaMapReduceIterable[T] = com.mongodb.async.client.MapReduceIterable[T] type JavaMongoIterable[T] = com.mongodb.async.client.MongoIterable[T] type JavaMongoClient = com.mongodb.async.client.MongoClient }
jeffmay/bson-adt
bson-adt-mongo3-async/src/main/scala/adt/bson/mongo/async/client/JavaAsyncClientModels.scala
Scala
apache-2.0
975
package io.atom.electron import scala.scalajs.js import js.annotation.JSName /** * See http://electron.atom.io/docs/v0.30.0/api/process/ */ @JSName("process") @js.native object Process extends js.Object { def platform: String = js.native }
sschaef/tooling-research
electron/src/main/scala/io/atom/electron/Process.scala
Scala
mit
248
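A short usage sketch (hypothetical): the facade above simply exposes the global process object provided by Node/Electron, so platform checks can be written as ordinary Scala.js code.

import io.atom.electron.Process

object PlatformInfo {
  // Node reports values such as "darwin", "win32" or "linux".
  def isMacOs: Boolean = Process.platform == "darwin"

  def describe(): String =
    if (isMacOs) "running on macOS" else s"running on ${Process.platform}"
}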
package gg.uhc.hosts.endpoints.rules import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives.{as, complete, entity, handleRejections} import akka.http.scaladsl.server.Route import gg.uhc.hosts.CustomJsonCodec import gg.uhc.hosts.database.Database import gg.uhc.hosts.endpoints.{CustomDirectives, EndpointRejectionHandler} class SetRules(customDirectives: CustomDirectives, database: Database) { import CustomJsonCodec._ import customDirectives._ def apply(): Route = handleRejections(EndpointRejectionHandler()) { requireAuthentication { session => requirePermission("hosting advisor", session.username) { entity(as[String]) { entity => requireSucessfulQuery(database.setRules(author = session.username, content = entity)) { _ => complete(StatusCodes.Created) } } } } } }
Eluinhost/hosts.uhc.gg
src/main/scala/gg/uhc/hosts/endpoints/rules/SetRules.scala
Scala
mit
907