Dataset schema:
  code        string  (lengths 5–1M)
  repo_name   string  (lengths 5–109)
  path        string  (lengths 6–208)
  language    string  (1 value)
  license     string  (15 values)
  size        int64   (5–1M)
package models

import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global

import play.api.libs.json._

object Game extends models.Base("game") {
  def isExist(gid: String): Future[Boolean] = {
    val query = Json.obj("id" -> gid)
    isExist(query)
  }
}
noraesae/tenhou-analysis-play
app/models/Game.scala
Scala
mit
282
/*
 * Copyright (C) Lightbend Inc. <https://www.lightbend.com>
 */

package com.lightbend.lagom.scaladsl.persistence.slick

import com.lightbend.lagom.internal.scaladsl.persistence.jdbc.JdbcPersistentEntityRegistry
import com.lightbend.lagom.scaladsl.persistence.TestEntity.Evt
import com.lightbend.lagom.scaladsl.persistence._

import scala.concurrent.Future
import scala.concurrent.duration.DurationDouble

class SlickReadSideSpec
    extends SlickPersistenceSpec(TestEntitySerializerRegistry)
    with AbstractReadSideSpec {
  import system.dispatcher

  protected override lazy val persistentEntityRegistry =
    new JdbcPersistentEntityRegistry(system, slick)

  override def processorFactory(): ReadSideProcessor[Evt] =
    new SlickTestEntityReadSide.TestEntityReadSideProcessor(slickReadSide, slick.db, slick.profile)

  lazy val readSide = new SlickTestEntityReadSide(slick.db, slick.profile)

  override def getAppendCount(id: String): Future[Long] = readSide.getAppendCount(id)

  override def afterAll(): Unit = {
    super.afterAll()
  }
}
lagom/lagom
persistence-jdbc/scaladsl/src/test/scala/com/lightbend/lagom/scaladsl/persistence/slick/SlickReadSideSpec.scala
Scala
apache-2.0
1,041
// test that dependent types work
// TODO: def apply(x: String): x.type does NOT work yet
object Test {
  val s: String = ""
  trait T { def apply(x: s.type): s.type }
  val preservedResult: s.type = ((x => x): T)(s)
}
yusuke2255/dotty
tests/untried/pos/sammy_single.scala
Scala
bsd-3-clause
221
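For context on the dotty test above: Scala's SAM conversion turns the function literal into an anonymous instance of T, and the test asserts that the dependent result type s.type survives the conversion. A hand-written sketch of roughly what the compiler synthesizes (illustrative only, not the compiler's actual output):

// hand-expanded equivalent of `((x => x): T)(s)`
val explicit: Test.T = new Test.T { def apply(x: Test.s.type): Test.s.type = x }
val alsoPreserved: Test.s.type = explicit(Test.s)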
package controllers

import com.github.mrmechko.service.{TripsHierarchyQueryImpl$, TripsHierarchyQuery, WordLookupQuery, WordLookupQueryImpl$}
import play.api.libs.json.Json
import play.api.mvc.{Action, Controller}
import com.github.mrmechko.protocol.QueryJson._

object QueryController extends Controller {
  val wordLookup: WordLookupQuery = WordLookupQueryImpl$
  val ancestorLookup: TripsHierarchyQuery = TripsHierarchyQueryImpl$

  def list(pos: Int, lemma: String) = Action { Ok(Json.toJson(wordLookup.lookup(lemma, pos))) }

  def ancestor(concept: String) = Action { Ok(Json.toJson(ancestorLookup.ancestor(concept))) }

  def children(concept: String) = Action { Ok(Json.toJson(ancestorLookup.children(concept))) }

  def pathToRoot(concept: String) = Action { Ok(Json.toJson(ancestorLookup.pathToRoot(concept))) }
}
mrmechko/OntologyManager
server/app/controllers/QueryController.scala
Scala
mit
825
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
   This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
   http://factorie.cs.umass.edu, http://github.com/factorie
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

package cc.factorie.model

import cc.factorie.model
import cc.factorie.variable.{DiscreteVar, TensorVar, Var, VectorVar}

import scala.reflect.ClassTag

abstract class Template3[N1<:Var,N2<:Var,N3<:Var](implicit nm1:ClassTag[N1], nm2:ClassTag[N2], nm3:ClassTag[N3]) extends Family3[N1,N2,N3] with Template {
  val neighborClass1 = nm1.runtimeClass
  val neighborClass2 = nm2.runtimeClass
  val neighborClass3 = nm3.runtimeClass
  def neighborClasses: Seq[Class[_]] = Seq(neighborClass1, neighborClass2, neighborClass3)

//  override def limitDiscreteValuesIteratorAsIn(variables:Iterable[DiscreteVar]): Unit = {
//    if (classOf[DiscreteVar].isAssignableFrom(neighborClass1) &&
//        classOf[DiscreteVar].isAssignableFrom(neighborClass2) &&
//        classOf[DiscreteVar].isAssignableFrom(neighborClass3))
//      for (variable <- variables; factor <- factors(variable))
//        limitedDiscreteValues.+=((
//          factor._1.asInstanceOf[DiscreteVar].intValue,
//          factor._2.asInstanceOf[DiscreteVar].intValue,
//          factor._3.asInstanceOf[DiscreteVar].intValue))
//  }

  final override def addFactors(v:Var, result:scala.collection.mutable.Set[model.Factor]): Unit = {
    if (neighborClass1.isAssignableFrom(v.getClass)) result ++= unroll1(v.asInstanceOf[N1])
    if (neighborClass2.isAssignableFrom(v.getClass)) result ++= unroll2(v.asInstanceOf[N2])
    if (neighborClass3.isAssignableFrom(v.getClass)) result ++= unroll3(v.asInstanceOf[N3])
    unroll(v) match {
      case fs:IterableSingleFactor[Factor] => result += fs.factor
      case Nil => {}
      case fs => result ++= fs
    }
  }

  /** Override this method if you want to re-capture old unrollCascade functionality. */
  def unroll(v:Var): Iterable[Factor] = Nil
  def unroll1(v:N1): Iterable[FactorType]
  def unroll2(v:N2): Iterable[FactorType]
  def unroll3(v:N3): Iterable[FactorType]

  def limitDiscreteValuesAsIn(vars:Iterable[DiscreteVar]): Unit =
    for (v <- vars; factor <- factors(v)) factor.asInstanceOf[model.Factor] match {
      case factor:Factor3[VectorVar @unchecked,VectorVar @unchecked,VectorVar @unchecked] =>
        (classOf[DiscreteVar].isAssignableFrom(neighborClass1),
         classOf[DiscreteVar].isAssignableFrom(neighborClass2),
         classOf[DiscreteVar].isAssignableFrom(neighborClass3)) match {
          // (factor._1.isInstanceOf[DiscreteVar], factor._2.isInstanceOf[DiscreteVar], factor._3.isInstanceOf[DiscreteVar]) match { // TODO No need to check types every time. -akm
          case (true, true, true) =>
            getLimitedDiscreteValues123(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue, factor._2.asInstanceOf[DiscreteVar].intValue, factor._3.asInstanceOf[DiscreteVar].intValue)
            getLimitedDiscreteValues12(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue, factor._2.asInstanceOf[DiscreteVar].intValue)
            getLimitedDiscreteValues1(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue)
          case (true, true, false) =>
            getLimitedDiscreteValues12(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue, factor._2.asInstanceOf[DiscreteVar].intValue)
            getLimitedDiscreteValues1(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue)
          case (true, false, false) =>
            getLimitedDiscreteValues1(factor).+=(factor._1.asInstanceOf[DiscreteVar].intValue)
          case (false, false, false) => {}
          case _ => throw new Error("Combination of DiscreteVar not yet implemented.")
        }
    }
}

abstract class TupleTemplate3[N1<:Var:ClassTag,N2<:Var:ClassTag,N3<:Var:ClassTag] extends Template3[N1,N2,N3] with TupleFamily3[N1,N2,N3]
abstract class TupleTemplateWithStatistics3[N1<:Var:ClassTag,N2<:Var:ClassTag,N3<:Var:ClassTag] extends Template3[N1,N2,N3] with TupleFamilyWithStatistics3[N1,N2,N3]
abstract class TensorTemplate3[N1<:Var:ClassTag,N2<:Var:ClassTag,N3<:Var:ClassTag] extends Template3[N1,N2,N3] with TensorFamily3[N1,N2,N3]
abstract class TensorTemplateWithStatistics3[N1<:TensorVar:ClassTag,N2<:TensorVar:ClassTag,N3<:TensorVar:ClassTag] extends Template3[N1,N2,N3] with TensorFamilyWithStatistics3[N1,N2,N3]
abstract class DotTemplate3[N1<:Var:ClassTag,N2<:Var:ClassTag,N3<:Var:ClassTag] extends Template3[N1,N2,N3] with DotFamily3[N1,N2,N3]
abstract class DotTemplateWithStatistics3[N1<:TensorVar:ClassTag,N2<:TensorVar:ClassTag,N3<:TensorVar:ClassTag] extends Template3[N1,N2,N3] with DotFamilyWithStatistics3[N1,N2,N3]
patverga/factorie
src/main/scala/cc/factorie/model/Template3.scala
Scala
apache-2.0
5,156
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.raft

import java.io.File
import java.nio.file.Files
import java.util
import java.util.OptionalInt
import java.util.concurrent.CompletableFuture

import kafka.log.UnifiedLog
import kafka.raft.KafkaRaftManager.RaftIoThread
import kafka.server.{KafkaConfig, MetaProperties}
import kafka.server.KafkaRaftServer.ControllerRole
import kafka.utils.timer.SystemTimer
import kafka.utils.{KafkaScheduler, Logging, ShutdownableThread}
import org.apache.kafka.clients.{ApiVersions, ManualMetadataUpdater, NetworkClient}
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network.{ChannelBuilders, ListenerName, NetworkReceive, Selectable, Selector}
import org.apache.kafka.common.protocol.ApiMessage
import org.apache.kafka.common.requests.RequestHeader
import org.apache.kafka.common.security.JaasContext
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.common.utils.{LogContext, Time}
import org.apache.kafka.common.{TopicPartition, Uuid}
import org.apache.kafka.raft.RaftConfig.{AddressSpec, InetAddressSpec, NON_ROUTABLE_ADDRESS, UnknownAddressSpec}
import org.apache.kafka.raft.{FileBasedStateStore, KafkaRaftClient, LeaderAndEpoch, RaftClient, RaftConfig, RaftRequest, ReplicatedLog}
import org.apache.kafka.server.common.serialization.RecordSerde

import scala.jdk.CollectionConverters._

object KafkaRaftManager {
  class RaftIoThread(
    client: KafkaRaftClient[_],
    threadNamePrefix: String
  ) extends ShutdownableThread(
    name = threadNamePrefix + "-io-thread",
    isInterruptible = false
  ) {
    override def doWork(): Unit = {
      client.poll()
    }

    override def initiateShutdown(): Boolean = {
      if (super.initiateShutdown()) {
        client.shutdown(5000).whenComplete { (_, exception) =>
          if (exception != null) {
            error("Graceful shutdown of RaftClient failed", exception)
          } else {
            info("Completed graceful shutdown of RaftClient")
          }
        }
        true
      } else {
        false
      }
    }

    override def isRunning: Boolean = {
      client.isRunning && !isThreadFailed
    }
  }

  private def createLogDirectory(logDir: File, logDirName: String): File = {
    val logDirPath = logDir.getAbsolutePath
    val dir = new File(logDirPath, logDirName)
    Files.createDirectories(dir.toPath)
    dir
  }
}

trait RaftManager[T] {
  def handleRequest(
    header: RequestHeader,
    request: ApiMessage,
    createdTimeMs: Long
  ): CompletableFuture[ApiMessage]

  def register(
    listener: RaftClient.Listener[T]
  ): Unit

  def leaderAndEpoch: LeaderAndEpoch

  def client: RaftClient[T]

  def replicatedLog: ReplicatedLog
}

class KafkaRaftManager[T](
  metaProperties: MetaProperties,
  config: KafkaConfig,
  recordSerde: RecordSerde[T],
  topicPartition: TopicPartition,
  topicId: Uuid,
  time: Time,
  metrics: Metrics,
  threadNamePrefixOpt: Option[String],
  val controllerQuorumVotersFuture: CompletableFuture[util.Map[Integer, AddressSpec]]
) extends RaftManager[T] with Logging {

  private val raftConfig = new RaftConfig(config)
  private val threadNamePrefix = threadNamePrefixOpt.getOrElse("kafka-raft")
  private val logContext = new LogContext(s"[RaftManager nodeId=${config.nodeId}] ")
  this.logIdent = logContext.logPrefix()

  private val scheduler = new KafkaScheduler(threads = 1, threadNamePrefix + "-scheduler")
  scheduler.startup()

  private val dataDir = createDataDir()
  override val replicatedLog: ReplicatedLog = buildMetadataLog()
  private val netChannel = buildNetworkChannel()
  override val client: KafkaRaftClient[T] = buildRaftClient()
  private val raftIoThread = new RaftIoThread(client, threadNamePrefix)

  def startup(): Unit = {
    // Update the voter endpoints (if valid) with what's in RaftConfig
    val voterAddresses: util.Map[Integer, AddressSpec] = controllerQuorumVotersFuture.get()
    for (voterAddressEntry <- voterAddresses.entrySet.asScala) {
      voterAddressEntry.getValue match {
        case spec: InetAddressSpec =>
          netChannel.updateEndpoint(voterAddressEntry.getKey, spec)
        case _: UnknownAddressSpec =>
          logger.info(s"Skipping channel update for destination ID: ${voterAddressEntry.getKey} " +
            s"because of non-routable endpoint: ${NON_ROUTABLE_ADDRESS.toString}")
        case invalid: AddressSpec =>
          logger.warn(s"Unexpected address spec (type: ${invalid.getClass}) for channel update for " +
            s"destination ID: ${voterAddressEntry.getKey}")
      }
    }
    netChannel.start()
    raftIoThread.start()
  }

  def shutdown(): Unit = {
    raftIoThread.shutdown()
    client.close()
    scheduler.shutdown()
    netChannel.close()
    replicatedLog.close()
  }

  override def register(
    listener: RaftClient.Listener[T]
  ): Unit = {
    client.register(listener)
  }

  override def handleRequest(
    header: RequestHeader,
    request: ApiMessage,
    createdTimeMs: Long
  ): CompletableFuture[ApiMessage] = {
    val inboundRequest = new RaftRequest.Inbound(
      header.correlationId,
      request,
      createdTimeMs
    )

    client.handle(inboundRequest)

    inboundRequest.completion.thenApply { response =>
      response.data
    }
  }

  private def buildRaftClient(): KafkaRaftClient[T] = {
    val expirationTimer = new SystemTimer("raft-expiration-executor")
    val expirationService = new TimingWheelExpirationService(expirationTimer)
    val quorumStateStore = new FileBasedStateStore(new File(dataDir, "quorum-state"))

    val nodeId = if (config.processRoles.contains(ControllerRole)) {
      OptionalInt.of(config.nodeId)
    } else {
      OptionalInt.empty()
    }

    val client = new KafkaRaftClient(
      recordSerde,
      netChannel,
      replicatedLog,
      quorumStateStore,
      time,
      metrics,
      expirationService,
      logContext,
      metaProperties.clusterId,
      nodeId,
      raftConfig
    )
    client.initialize()
    client
  }

  private def buildNetworkChannel(): KafkaNetworkChannel = {
    val netClient = buildNetworkClient()
    new KafkaNetworkChannel(time, netClient, config.quorumRequestTimeoutMs, threadNamePrefix)
  }

  private def createDataDir(): File = {
    val logDirName = UnifiedLog.logDirName(topicPartition)
    KafkaRaftManager.createLogDirectory(new File(config.metadataLogDir), logDirName)
  }

  private def buildMetadataLog(): KafkaMetadataLog = {
    KafkaMetadataLog(
      topicPartition,
      topicId,
      dataDir,
      time,
      scheduler,
      config = MetadataLogConfig(config, KafkaRaftClient.MAX_BATCH_SIZE_BYTES, KafkaRaftClient.MAX_FETCH_SIZE_BYTES)
    )
  }

  private def buildNetworkClient(): NetworkClient = {
    val controllerListenerName = new ListenerName(config.controllerListenerNames.head)
    val controllerSecurityProtocol = config.effectiveListenerSecurityProtocolMap.getOrElse(
      controllerListenerName,
      SecurityProtocol.forName(controllerListenerName.value())
    )
    val channelBuilder = ChannelBuilders.clientChannelBuilder(
      controllerSecurityProtocol,
      JaasContext.Type.SERVER,
      config,
      controllerListenerName,
      config.saslMechanismControllerProtocol,
      time,
      config.saslInterBrokerHandshakeRequestEnable,
      logContext
    )

    val metricGroupPrefix = "raft-channel"
    val collectPerConnectionMetrics = false

    val selector = new Selector(
      NetworkReceive.UNLIMITED,
      config.connectionsMaxIdleMs,
      metrics,
      time,
      metricGroupPrefix,
      Map.empty[String, String].asJava,
      collectPerConnectionMetrics,
      channelBuilder,
      logContext
    )

    val clientId = s"raft-client-${config.nodeId}"
    val maxInflightRequestsPerConnection = 1
    val reconnectBackoffMs = 50
    val reconnectBackoffMsMs = 500
    val discoverBrokerVersions = true

    new NetworkClient(
      selector,
      new ManualMetadataUpdater(),
      clientId,
      maxInflightRequestsPerConnection,
      reconnectBackoffMs,
      reconnectBackoffMsMs,
      Selectable.USE_DEFAULT_BUFFER_SIZE,
      config.socketReceiveBufferBytes,
      config.quorumRequestTimeoutMs,
      config.connectionSetupTimeoutMs,
      config.connectionSetupTimeoutMaxMs,
      time,
      discoverBrokerVersions,
      new ApiVersions,
      logContext
    )
  }

  override def leaderAndEpoch: LeaderAndEpoch = {
    client.leaderAndEpoch
  }
}
TiVo/kafka
core/src/main/scala/kafka/raft/RaftManager.scala
Scala
apache-2.0
9,218
/*
 * SPDX-License-Identifier: Apache-2.0
 * Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dev.tauri.choam

import cats.Eq
import cats.implicits._

import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

abstract class CtrieSpec extends BaseSpec with ScalaCheckDrivenPropertyChecks {

  val hs: Int => Int = { x => x }

  def newEmpty(
    hashFunc: Int => Int = hs,
    eqFunc: (Int, Int) => Boolean = _ == _
  ): Ctrie[Int, String] = {
    new Ctrie[Int, String](hashFunc, Eq.instance(eqFunc))
  }

  "Ctrie#lookup" should "not find anything in an empty trie" in {
    val ct = newEmpty()
    forAll { i: Int =>
      ct.lookup.unsafePerform(i) should === (None)
    }
  }

  it should "find a previously inserted single key" in {
    forAll { (k: Int, i: Int) =>
      val ct = newEmpty()
      ct.insert.unsafePerform(k -> k.toString)
      ct.lookup.unsafePerform(k) should === (Some(k.toString))
      if (i =!= k) {
        ct.lookup.unsafePerform(i) should === (None)
      }
    }
  }

  it should "find all previously inserted keys" in {
    forAll { (ks: Set[Int], x: Int) =>
      val ct = newEmpty()
      val shadow = new scala.collection.mutable.HashSet[Int]
      for (k <- ks) {
        ct.insert.unsafePerform(k -> k.toString)
        shadow += k
        for (i <- shadow) {
          ct.lookup.unsafePerform(i) should === (Some(i.toString))
        }
        if (!shadow.contains(x)) {
          ct.lookup.unsafePerform(x) should === (None)
        }
      }
    }
  }

  it should "find an equal key which is not equal according to universal equality" in {
    val ct = newEmpty(_ % 4, (x, y) => (x % 8) == (y % 8))
    ct.insert.unsafePerform(0 -> "0")
    ct.lookup.unsafePerform(0) should === (Some("0"))
    ct.lookup.unsafePerform(8) should === (Some("0"))
    ct.insert.unsafePerform(4 -> "4")
    ct.lookup.unsafePerform(0) should === (Some("0"))
    ct.lookup.unsafePerform(8) should === (Some("0"))
    ct.lookup.unsafePerform(4) should === (Some("4"))
    ct.lookup.unsafePerform(12) should === (Some("4"))
  }

  "Ctrie#insert" should "handle hash collisions correctly" in {
    forAll { (ks: Set[Int], x: Int) =>
      val ct = newEmpty(_ % 8)
      ct.insert.unsafePerform(x -> x.toString)
      ct.lookup.unsafePerform(x) should === (Some(x.toString))

      ct.insert.unsafePerform(x + 8 -> (x + 8).toString)
      ct.lookup.unsafePerform(x) should === (Some(x.toString))
      ct.lookup.unsafePerform(x + 8) should === (Some((x + 8).toString))

      ct.insert.unsafePerform(x + 16 -> (x + 16).toString)
      ct.lookup.unsafePerform(x) should === (Some(x.toString))
      ct.lookup.unsafePerform(x + 8) should === (Some((x + 8).toString))
      ct.lookup.unsafePerform(x + 16) should === (Some((x + 16).toString))

      ct.insert.unsafePerform(x + 1 -> (x + 1).toString)
      ct.lookup.unsafePerform(x) should === (Some(x.toString))
      ct.lookup.unsafePerform(x + 8) should === (Some((x + 8).toString))
      ct.lookup.unsafePerform(x + 16) should === (Some((x + 16).toString))
      ct.lookup.unsafePerform(x + 1) should === (Some((x + 1).toString))

      ct.insert.unsafePerform(x + 9 -> (x + 9).toString)
      ct.lookup.unsafePerform(x) should === (Some(x.toString))
      ct.lookup.unsafePerform(x + 8) should === (Some((x + 8).toString))
      ct.lookup.unsafePerform(x + 16) should === (Some((x + 16).toString))
      ct.lookup.unsafePerform(x + 1) should === (Some((x + 1).toString))
      ct.lookup.unsafePerform(x + 9) should === (Some((x + 9).toString))

      for (k <- ks) {
        ct.insert.unsafePerform(k -> k.toString)
        ct.lookup.unsafePerform(k) should === (Some(k.toString))
      }

      ct.insert.unsafePerform(x + 17 -> (x + 17).toString)
      ct.lookup.unsafePerform(x + 17) should === (Some((x + 17).toString))
    }
  }

  "Ctrie#debugStr" should "pretty print the trie structure" in {
    val ct = new Ctrie[Int, String](_ % 4, Eq.instance(_ % 8 == _ % 8))
    ct.insert.unsafePerform(0 -> "0")
    ct.insert.unsafePerform(1 -> "1")
    ct.insert.unsafePerform(4 -> "4")
    ct.insert.unsafePerform(5 -> "5")
    ct.insert.unsafePerform(8 -> "8") // overwrites 0
    ct.insert.unsafePerform(9 -> "9") // overwrites 1
    val expStr = """INode -> CNode 3
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> LNode(8 -> 8, 4 -> 4)
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> CNode 1
                   | INode -> LNode(9 -> 9, 5 -> 5)""".stripMargin
    ct.debugStr should === (expStr)
  }
}

final class CtrieSpecNaiveKCAS extends CtrieSpec with SpecNaiveKCAS
final class CtrieSpecCASN extends CtrieSpec with SpecCASN
final class CtrieSpecMCAS extends CtrieSpec with SpecMCAS
final class CtrieSpecEMCAS extends CtrieSpec with SpecEMCAS
durban/exp-reagents
core/src/test/scala/dev/tauri/choam/CtrieSpec.scala
Scala
apache-2.0
5,604
package info.armado.ausleihe.client.transport.dataobjects.information

import info.armado.ausleihe.client.transport.dataobjects.LendGameStatusDTO
import info.armado.ausleihe.client.transport.requests.GameInformationRequestDTO
import javax.xml.bind.annotation.{XmlAccessType, XmlAccessorType, XmlRootElement}

@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
case class GameInformationDTO(
    var request: GameInformationRequestDTO,
    var foundGames: Array[LendGameStatusDTO]
) {
  def this() = this(null, Array.empty)

  override def equals(other: Any): Boolean = {
    val Request = request

    other match {
      case GameInformationDTO(Request, otherFoundGames) if foundGames.sameElements(otherFoundGames) => true
      case _ => false
    }
  }

  override def hashCode: Int = {
    val prime = 31
    var result = 1
    result = prime * result + (if (request == null) 0 else request.hashCode)
    result = prime * result + (if (foundGames == null) 0 else foundGames.toSet.hashCode)
    result
  }

  override def toString: String = s"GameInformation($request, ${foundGames.toSet})"
}
Spielekreis-Darmstadt/lending
lending-client-interfaces/src/main/scala/info/armado/ausleihe/client/transport/dataobjects/information/GameInformationDTO.scala
Scala
apache-2.0
1,120
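One subtlety in the equals method of GameInformationDTO.scala above: `val Request = request` deliberately creates a capitalized name, because in the pattern `GameInformationDTO(Request, ...)` an uppercase identifier is matched by equality against that stable value rather than bound as a fresh variable. A standalone sketch of the idiom (Pair and firstMatches are hypothetical names, not from the original file):

case class Pair(a: Int, b: Int)

def firstMatches(x: Pair, y: Pair): Boolean = {
  val Expected = x.a // uppercase: treated as a stable identifier in patterns
  y match {
    case Pair(Expected, _) => true  // matches only when y.a == x.a
    case _                 => false // a lowercase `expected` here would bind anything
  }
}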
// -*- mode: Scala;-*-
// Filename:    Bard.scala
// Authors:     lgm
// Creation:    Wed May 27 17:12:47 2009
// Copyright:   Not supplied
// Description:
// ------------------------------------------------------------------------

package net.liftweb.amqp;

//import com.eaio.uuid.UUID;
import java.util.UUID

trait IdSupplier {
  type ActedOn = {def setId( s : String ) : Unit}
  type Classic = {def getClass() : java.lang.Class[_]}
  type ClassicallyActedOn = Classic with ActedOn
  type Action = ActedOn => Unit
  type InAction = Classic => Unit

  def recurse() : Boolean
  def hasDepth( pojo : java.lang.Object ) : Boolean = {
    pojo.isInstanceOf[ClassicallyActedOn]
  }
  def recurse( pojo : java.lang.Object ) : Boolean = {
    ((hasDepth( pojo )) && (recurse()))
  }
  def failOnUnknownType() : Boolean
  def inView( field : java.lang.reflect.Field, pojo : Classic ) : Boolean = {
    true
  }
  def isGroundValueType( value : {def getClass() : java.lang.Class[_]} ) : Boolean = {
    ((value.isInstanceOf[Boolean])
     || (value.isInstanceOf[Int])
     || (value.isInstanceOf[Float])
     || (value.isInstanceOf[String])
     // put more ground types here
    )
  }
  def getNextId() : String
  def stdAction() : Action = {
    ( subject : ActedOn ) => {
      subject.setId( getNextId() )
    }
  }
  def inAction() = {
    (_ : Classic) => {
    }
  }
  def handleUnmatchedValue(
    vUnmatched : Classic,
    field : java.lang.reflect.Field,
    pojo : ClassicallyActedOn,
    inAction : InAction
  ) : Unit = {
    if (! failOnUnknownType() ) {
      inAction( vUnmatched )
    }
    else throw new Exception(
      ( "unmatched type "
       + vUnmatched.getClass.toString
       + " when attempting to render the "
       + field.getName
       + " field of "
       + pojo )
    )
  }
  def handleGroundValue( value : Classic ) : Unit = {
    (inAction())( value )
  }
  def handleValue(
    value : Classic,
    field : java.lang.reflect.Field,
    pojo : ClassicallyActedOn,
    action : Action
  ) : Unit = {
    if (value == null) {
      (inAction())( null )
    }
    else {
      if ( isGroundValueType( value ) )
        handleGroundValue( value )
      else if ( recurse( value ) )
        handlePOJO( value.asInstanceOf[ClassicallyActedOn], action )
      else handleUnmatchedValue( value, field, pojo, inAction() )
    }
  }
  def handleField(
    field : java.lang.reflect.Field,
    pojo : ClassicallyActedOn,
    action : Action
  ) : Unit = {
    // reflectively break java access control mechanisms
    val accessible = field.isAccessible;
    field.setAccessible( true );

    handleValue( field.get( pojo ), field, pojo, action )

    // put java access mechanisms back in place
    field.setAccessible( accessible );
  }

  // This is the basic monadic/monad transformer view of the pojo
  // rendering process. It shouldn't be surprising that this would
  // have this form: if you stop to think about it a pojo is a relation.
  def handlePOJO( pojo : ClassicallyActedOn, action : Action ) : Unit = {
    action( pojo );
    val progeny =
      for (field <- pojo.getClass.getDeclaredFields if inView( field, pojo ))
      yield handleField( field, pojo, action );
  }

  def supplyIds( pojo : ClassicallyActedOn ) : Unit = {
    handlePOJO( pojo, stdAction() )
  }
  def generateIds( obj : java.lang.Object ) : Unit = {
    supplyIds( obj.asInstanceOf[ClassicallyActedOn] )
  }

  // ugly stuff
  def reallyHasSetId( pojo : Classic ) : Boolean = {
    (for ( m <- pojo.getClass.getMethods if m.getName.contains( "setId" ) )
     yield { true }).length > 0
  }
  def reallyCallSetId( pojo : Classic, id : String ) : Unit = {
    // fix this
    // val setIdMethod : java.lang.reflect.Method
    //   = (for ( m <- pojo.getClass.getMethods if m.getName.contains( "setId" ) )
    //      yield { m })(0);
    // setIdMethod.invoke( pojo, Array( id ): _* );
  }
  def altHasDepth( pojo : java.lang.Object ) : Boolean = {
    reallyHasSetId( pojo )
  }
  def altRecurse( pojo : java.lang.Object ) : Boolean = {
    ((altHasDepth( pojo )) && (recurse()))
  }
  def altAction() : Action = {
    ( subject : ActedOn ) => {
      if (reallyHasSetId( subject ))
        reallyCallSetId( subject, getNextId() )
    }
  }
}

case class Fingerer( rcrs : Boolean, fOUT : Boolean ) extends IdSupplier {
  override def recurse() = rcrs
  override def failOnUnknownType() = fOUT
  // override def inAction() = {
  //   case x : Classic => {
  //   }
  // }
  override def getNextId() = {
    UUID.randomUUID + ""
  }
}
leithaus/strategies
src/main/scala/net/liftweb/amqp/Bard.scala
Scala
cc0-1.0
4,694
package io.getquill.context.sql.norm

import io.getquill.ast.FlatMap
import io.getquill.ast.Join
import io.getquill.ast.Map
import io.getquill.ast.Query
import io.getquill.ast.StatelessTransformer

object MergeSecondaryJoin extends StatelessTransformer {

  override def apply(q: Query) =
    q match {
      case FlatMap(current: Join, _, body) =>
        body match {
          case FlatMap(next: Join, alias, body) if isSecondary(next) =>
            body match {
              case Map(last: Join, alias, body) if isSecondary(last) =>
                Map(merge(merge(current, next), last), alias, body)
              case _ =>
                apply(FlatMap(merge(current, next), alias, body))
            }
          case Map(last: Join, alias, body) if isSecondary(last) =>
            Map(merge(current, last), alias, body)
          case _ => q
        }
      case _ => q
    }

  private def merge(current: Join, next: Join): Join = {
    val ident = next.aliasA
    Join(next.typ, current, next.a, ident, ident, next.on)
  }

  private def isSecondary(j: Join): Boolean =
    j.a == j.b
}
jcranky/quill
quill-sql/src/main/scala/io/getquill/context/sql/norm/MergeSecondaryJoin.scala
Scala
apache-2.0
1,061
/* Lists are immutable: once a value is defined it cannot be changed */
object ListTest {
  // list of strings
  val site1: List[String] = List("Runoob", "Google", "Baidu")
  val site2 = "Runoob" :: ("Google" :: ("Baidu" :: Nil))

  // list of integers
  val nums1: List[Int] = List(1, 2, 3, 4)
  val nums2 = 1 :: (2 :: (3 :: (4 :: Nil)))

  // two-dimensional list
  val dim1: List[List[Int]] = List(
    List(1, 0, 0),
    List(0, 1, 0),
    List(0, 0, 1)
  )
  val dim2 = (1 :: (0 :: (0 :: Nil))) ::
             (0 :: (1 :: (0 :: Nil))) ::
             (0 :: (0 :: (1 :: Nil))) :: Nil

  // empty lists
  val empty1: List[Nothing] = List()
  val empty2 = Nil

  // sum a list using pattern matching
  def sum(lst: List[Int]): Int = lst match {
    case Nil => 0
    case h :: t => h + sum(t) // h is the head (lst.head), t is the tail (lst.tail)
  }

  def main(args: Array[String]): Unit = {
    println(site1.head)
    println(site1(1))
    println(dim1(1)(1))
    println("Sum of list elements: " + sum(nums1))
    var MapList = site1.map(_.toUpperCase)
    println(MapList)
  }
}
PengLiangWang/Scala
Collection/ListTest.scala
Scala
gpl-3.0
1,177
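The `sum` in ListTest above recurses on the tail outside of tail position, so very long lists can overflow the stack. A sketch of a tail-recursive variant with an accumulator (the name sumAcc is illustrative, not from the original file):

import scala.annotation.tailrec

@tailrec
def sumAcc(lst: List[Int], acc: Int = 0): Int = lst match {
  case Nil    => acc
  case h :: t => sumAcc(t, acc + h) // recursive call is the last action, so it compiles to a loop
}

// sumAcc(List(1, 2, 3, 4)) == 10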
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer

import java.nio.charset.Charset

import scala.concurrent.duration._

import akka.testkit._
import org.apache.commons.vfs2._
import org.ensime.fixture._
import org.ensime.util._
import org.ensime.util.file._
import org.ensime.util.fileobject._
import org.ensime.util.path._
import org.ensime.vfs._
import org.scalatest.concurrent.TimeLimitedTests
import org.scalatest.tagobjects.Retryable
import org.scalatest.time._

sealed trait FileWatcherMessage
final case class Added(f: FileObject) extends FileWatcherMessage
final case class Removed(f: FileObject) extends FileWatcherMessage
final case class Changed(f: FileObject) extends FileWatcherMessage
final case class BaseAdded(f: FileObject) extends FileWatcherMessage
final case class BaseRemoved(f: FileObject) extends FileWatcherMessage
final case class BaseRegistered() extends FileWatcherMessage

/**
 * These tests are insanely flakey so everything is retryable. The
 * fundamental problem is that file watching is impossible without
 * true OS and FS support, which is lacking on all major platforms.
 *
 * OS X is just too flakey to even bother.
 *
 * NOTE: if you're making local edits to this file, try adding "with
 * ParallelTestExecution". It's not used by default, only to
 * reduce the load on the pathetic CI machines.
 */
@tags.IgnoreOnTravis
class FileWatcherSpec
    extends EnsimeSpec
    with TimeLimitedTests
    with IsolatedTestKitFixture
    with IsolatedEnsimeVFSFixture {

  // some of these tests hang sporadically on Windows, so fail fast.
  // not retried: https://github.com/scalatest/scalatest/issues/1087
  override val timeLimit = scaled(Span(30, Seconds))

  implicit val DefaultCharset: Charset = Charset.defaultCharset()

  // variant that watches a jar file
  def createJarWatcher(jar: File)(implicit vfs: EnsimeVFS, tk: TestKit): Watcher =
    (new JarJava7WatcherBuilder()).build(jar, listeners)

  // variant that recursively watches a directory of classes
  def createClassWatcher(base: File)(implicit vfs: EnsimeVFS, tk: TestKit): Watcher =
    (new ClassJava7WatcherBuilder()).build(base, listeners)

  /**
   * The Linux ext2+ filesystems have a timestamp precision of 1
   * second, which means it's impossible to tell if a newly created
   * file has been modified, or deleted and re-added, if it happens
   * sub-second (without looking at the contents).
   */
  def waitForLinus(): Unit = Thread.sleep(1000)

  val maxWait = 20 seconds

  "FileWatcher" should "detect added files" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        withClassWatcher(dir) { watcher =>
          waitForBaseRegistered(tk)

          val foo = (dir / "foo.class")
          val bar = (dir / "b/bar.class")

          foo.createWithParents() shouldBe true
          bar.createWithParents() shouldBe true

          val fishForFooBar: Fish = {
            case Added(f) => {
              f.asLocalFile.getAbsolutePath == foo.getAbsolutePath ||
                f.asLocalFile.getAbsolutePath == bar.getAbsolutePath
            }
            case _ => false
          }

          tk.fishForMessage(maxWait)(fishForFooBar)
          tk.fishForMessage(maxWait)(fishForFooBar)
        }
      }
    }
  }

  it should "detect added / changed files" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        withClassWatcher(dir) { watcher =>
          waitForBaseRegistered(tk)

          val foo = (dir / "foo.class")
          val bar = (dir / "b/bar.class")

          foo.createWithParents() shouldBe true
          bar.createWithParents() shouldBe true
          tk.expectMsgType[Added]
          tk.expectMsgType[Added]
          ignoreAdded(tk)
          ignoreAdded(tk)

          waitForLinus()

          foo.writeString("foo")
          bar.writeString("bar")
          tk.expectMsgType[Changed]
          tk.expectMsgType[Changed]
        }
      }
    }
  }

  it should "detect added / removed files" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        withClassWatcher(dir) { watcher =>
          waitForBaseRegistered(tk)
          tk.ignoreMsg {
            case msg: Changed => true // ignore on Windows
          }

          val foo = (dir / "foo.class")
          val bar = (dir / "b/bar.class")

          foo.createWithParents() shouldBe true
          bar.createWithParents() shouldBe true
          tk.expectMsgType[Added]
          tk.expectMsgType[Added]
          ignoreAdded(tk)
          ignoreAdded(tk)

          waitForLinus()

          foo.delete()
          bar.delete()
          tk.expectMsgType[Removed]
          tk.expectMsgType[Removed]
        }
      }
    }
  }

  it should "detect removed base directory" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        withClassWatcher(dir) { watcher =>
          waitForBaseRegistered(tk)
          waitForLinus()

          dir.delete()

          val createOrDelete: Fish = {
            case r: BaseRemoved => true
            case a: BaseAdded => true
            case _ => false
          }
          tk.fishForMessage()(createOrDelete)
          tk.fishForMessage()(createOrDelete)
        }
      }
    }
  }

  it should "detect removed parent base directory" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDirPath { tmpDir =>
        val parent = tmpDir.toFile.canon
        val dir = parent / "base"
        dir.mkdirs()
        try {
          withClassWatcher(dir) { watcher =>
            // would be better if this was atomic (not possible from JVM?)
            waitForBaseRegistered(tk)

            parent.toPath().deleteDirRecursively()

            val createOrDelete: Fish = {
              case r: BaseRemoved => true
              case a: BaseAdded => true
              case _ => false
            }
            tk.fishForMessage()(createOrDelete)
            tk.fishForMessage()(createOrDelete)
          }
        } finally parent.toPath().deleteDirRecursively()
      }
    }
  }

  // hangs regularly on Windows, possible bug in JDK
  it should "survive deletion of the watched directory" taggedAs (Retryable, IgnoreOnAppVeyor) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        withClassWatcher(dir) { watcher =>
          waitForBaseRegistered(tk)

          val foo = (dir / "foo.class")
          val bar = (dir / "b/bar.class")

          foo.createWithParents() shouldBe true
          bar.createWithParents() shouldBe true
          tk.expectMsgType[Added]
          tk.expectMsgType[Added]
          ignoreAdded(tk)
          ignoreAdded(tk)

          waitForLinus()

          dir.tree.reverse.foreach(_.delete())

          val createOrDelete: Fish = {
            case r: BaseRemoved => true
            case a: BaseAdded => true
            case r: Removed => false
            case r: Added => false // java7 watcher can detect it twice as existing and created
            case r: Changed => false // ignore on Windows
          }
          tk.fishForMessage()(createOrDelete)
          tk.fishForMessage()(createOrDelete)

          foo.createWithParents() shouldBe true
          bar.createWithParents() shouldBe true

          val nonDeterministicAdd: Fish = {
            case a: Added => true
            case c: Changed => true
            case r: Removed => false
            case r: BaseRemoved => false
            case r: BaseAdded => false
          }
          tk.fishForMessage()(nonDeterministicAdd)
          tk.fishForMessage()(nonDeterministicAdd)
        }
      }
    }
  }

  it should "be able to start up from a non-existent directory" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDirPath { tmpDir =>
        val dir = tmpDir.toFile.canon / "root"
        try {
          withClassWatcher(dir) { watcher =>
            tk.ignoreMsg {
              case msg: BaseAdded => true
            }

            val foo = (dir / "foo.class")
            val bar = (dir / "b/bar.class")

            waitForLinus()
            foo.createWithParents() shouldBe true
            bar.createWithParents() shouldBe true

            val fishForFooBar: Fish = {
              case Added(f) => {
                f.asLocalFile.getAbsolutePath == foo.getAbsolutePath ||
                  f.asLocalFile.getAbsolutePath == bar.getAbsolutePath
              }
              case _ => false
            }

            tk.fishForMessage(maxWait)(fishForFooBar)
            tk.fishForMessage(maxWait)(fishForFooBar)
          }
        } finally dir.tree.reverse.foreach(_.delete())
      }
    }
  }

  it should "survive removed parent base directory and recreated base" taggedAs (Retryable, IgnoreOnAppVeyor) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDirPath { tmpDir =>
        val parent = tmpDir.toFile.canon
        val dir = parent / "base"
        dir.mkdirs()
        try {
          withClassWatcher(dir) { watcher =>
            waitForBaseRegistered(tk)

            val foo = (dir / "foo.class")
            val bar = (dir / "b/bar.class")

            foo.createWithParents() shouldBe true
            bar.createWithParents() shouldBe true
            tk.expectMsgType[Added]
            tk.expectMsgType[Added]
            ignoreAdded(tk)
            ignoreAdded(tk)

            waitForLinus()

            parent.tree.reverse.foreach(_.delete())

            val createOrDelete: Fish = {
              case r: BaseRemoved => true
              case a: BaseAdded => true
              case r: Removed => false
              case a: Added => false
              case r: Changed => false // ignore on Windows
            }
            tk.fishForMessage()(createOrDelete)
            tk.fishForMessage()(createOrDelete)

            foo.createWithParents() shouldBe true
            bar.createWithParents() shouldBe true

            // non-deterministically receive zero, one or two more Removed
            // and either Added or Changed for foo / bar.
            val nonDeterministicAdd: Fish = {
              case a: Added => true
              case c: Changed => true
              case r: Removed => false
              case r: BaseRemoved => false // ignore on Windows
              case r: BaseAdded => false // ignore on Windows
            }
            tk.fishForMessage()(nonDeterministicAdd)
            tk.fishForMessage()(nonDeterministicAdd)
          }
        } finally dir.tree.reverse.foreach(_.delete())
      }
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  it should "detect changes to a file base" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        val jar = (dir / "jar.jar")
        jar.createWithParents() shouldBe true

        withJarWatcher(jar) { watcher =>
          waitForBaseRegistered(tk)
          waitForLinus()

          jar.writeString("binks")
          tk.expectMsgType[Changed]
        }
      }
    }
  }

  it should "detect removal of a file base" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        val jar = (dir / "jar.jar")
        jar.createWithParents() shouldBe true

        withJarWatcher(jar) { watcher =>
          waitForBaseRegistered(tk)
          waitForLinus()

          tk.ignoreMsg {
            case msg: Changed => true // ignore on Windows
          }

          jar.delete()
          tk.expectMsgType[Removed]
        }
      }
    }
  }

  it should "be able to start up from a non-existent base file" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        val jar = (dir / "jar.jar")

        withJarWatcher(jar) { watcher =>
          waitForLinus()

          tk.ignoreMsg {
            case msg: BaseAdded => true
          }

          jar.createWithParents() shouldBe true
          tk.expectMsgType[Added]
        }
      }
    }
  }

  it should "survive removal of a file base" taggedAs (Retryable) in withVFS { implicit vfs =>
    withTestKit { implicit tk =>
      withTempDir { dir =>
        val jar = (dir / "jar.jar")
        jar.createWithParents() shouldBe true

        withJarWatcher(jar) { watcher =>
          waitForBaseRegistered(tk)
          tk.ignoreMsg {
            case msg: Changed => true // ignore on Windows
          }

          waitForLinus()
          jar.delete() // best thing for him, frankly
          tk.expectMsgType[Removed]

          waitForLinus()
          jar.writeString("binks")
          tk.expectMsgType[Added]
        }
      }
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  type -->[A, B] = PartialFunction[A, B]
  type Fish = PartialFunction[Any, Boolean]

  def waitForBaseRegistered(tk: TestKit) = {
    val baseCreated: Fish = {
      case BaseRegistered() => true //f == dir
      // case e => { logEvent("Bad ", dir, e); false }
      case e => false
    }
    tk.fishForMessage(5 seconds)(baseCreated)
  }

  def ignoreAdded(tk: TestKit) =
    // Ignore extra Added message because
    // java7 watcher can detect a file in a directory as existing
    // or/and as created.
    try {
      val baseCreated: Fish = {
        case Added(f) => true
        case e => false
      }
      tk.fishForMessage(1 second)(baseCreated)
    } catch {
      case e: Throwable => true
    }

  def withClassWatcher[T](
    base: File
  )(code: Watcher => T)(implicit vfs: EnsimeVFS, tk: TestKit) = {
    val w = createClassWatcher(base)
    try code(w)
    finally w.shutdown()
  }

  def withJarWatcher[T](jar: File)(code: Watcher => T)(implicit vfs: EnsimeVFS, tk: TestKit) = {
    val w = createJarWatcher(jar)
    try code(w)
    finally w.shutdown()
  }

  def listeners(implicit tk: TestKit) = List(
    new FileChangeListener {
      def fileAdded(f: FileObject): Unit = tk.testActor ! Added(f)
      def fileRemoved(f: FileObject): Unit = tk.testActor ! Removed(f)
      def fileChanged(f: FileObject): Unit = tk.testActor ! Changed(f)
      override def baseReCreated(f: FileObject): Unit = tk.testActor ! BaseAdded(f)
      override def baseRemoved(f: FileObject): Unit = tk.testActor ! BaseRemoved(f)
      override def baseRegistered(): Unit = tk.testActor ! BaseRegistered()
    }
  )
}
yyadavalli/ensime-server
core/src/test/scala/org/ensime/indexer/FileWatcherSpec.scala
Scala
gpl-3.0
15,611
package spire.algebra

import annotation.tailrec
import scala.{specialized => spec}

/**
 * Ring represents a set (A) that is a group over addition (+) and a monoid
 * over multiplication (*). Aside from this, the multiplication must distribute
 * over addition.
 *
 * Ring implements some methods (for example fromInt) in terms of other more
 * fundamental methods (zero, one and plus). Where possible, these methods
 * should be overridden by more efficient implementations.
 */
trait Ring[@spec(Byte, Short, Int, Long, Float, Double) A] extends Any with Rig[A] with Rng[A] {
  /**
   * Defined to be equivalent to `additive.sumn(one, n)`. That is, `n`
   * repeated summations of this ring's `one`, or `-one` if `n` is
   * negative.
   */
  def fromInt(n: Int): A = sumn(one, n)
}

object Ring {
  @inline final def apply[A](implicit r: Ring[A]): Ring[A] = r
}

/**
 * CRing is a Ring that is commutative under multiplication.
 */
trait CRing[@spec(Byte, Short, Int, Long, Float, Double) A] extends Any with Ring[A] with MultiplicativeCMonoid[A]

object CRing {
  @inline final def apply[A](implicit r: CRing[A]): CRing[A] = r
}
woparry/spire
core/src/main/scala/spire/algebra/Ring.scala
Scala
mit
1,133
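The Ring scaladoc above defines fromInt as n repeated summations of one, or -one when n is negative. A minimal sketch of that default on plain Int, outside spire's type-class machinery (fromIntViaSum is an illustrative name, not spire API):

// Illustrative only: mirrors the documented default `sumn(one, n)` on plain
// Int, ignoring overflow edge cases such as n == Int.MinValue.
def fromIntViaSum(one: Int, n: Int): Int = {
  val step = if (n >= 0) one else -one // -one for negative n
  (1 to math.abs(n)).foldLeft(0)((acc, _) => acc + step)
}

// fromIntViaSum(1, 5) == 5, fromIntViaSum(1, -3) == -3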
/*
 * Copyright 2001-2016 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalactic.anyvals

import org.scalactic.Equality
import org.scalatest._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import OptionValues._
import org.scalactic.{Pass, Fail}
import org.scalactic.{Good, Bad}

import scala.util.{Failure, Success, Try}

trait NonZeroIntSpecSupport {
  implicit def tryEquality[T]: Equality[Try[T]] = new Equality[Try[T]] {
    override def areEqual(a: Try[T], b: Any): Boolean = a match {
      // This is because in scala.js x/0 results in NaN, not ArithmeticException like on the JVM,
      // and we need to make sure Success(NaN) == Success(NaN) is true to pass the test.
      case Success(double: Double) if double.isNaN =>
        b match {
          case Success(bDouble: Double) if bDouble.isNaN => true
          case _ => false
        }
      // I needed this because with GenDrivenPropertyChecks, got:
      // [info] - should offer a '%' method that is consistent with Int *** FAILED ***
      // [info]   Success(NaN) did not equal Success(NaN) (NonZeroIntExperiment.scala:498)
      case Success(float: Float) if float.isNaN =>
        b match {
          case Success(bFloat: Float) if bFloat.isNaN => true
          case _ => false
        }
      case _: Success[_] => a == b
      case Failure(ex) =>
        b match {
          case _: Success[_] => false
          case Failure(otherEx) => ex.getClass == otherEx.getClass && ex.getMessage == otherEx.getMessage
          case _ => false
        }
    }
  }
}

class NonZeroIntSpec extends FunSpec with Matchers with GeneratorDrivenPropertyChecks with NonZeroIntSpecSupport {

  describe("A NonZeroInt") {

    describe("should offer a from factory method that") {
      it("returns Some[NonZeroInt] if the passed Int is greater than 0") {
        NonZeroInt.from(50).value.value shouldBe 50
        NonZeroInt.from(100).value.value shouldBe 100
      }
      it("returns Some[NonZeroInt] if the passed Int is lesser than 0") {
        NonZeroInt.from(-1).value.value shouldBe -1
        NonZeroInt.from(-99).value.value shouldBe -99
      }
      it("returns None if the passed Int is 0") {
        NonZeroInt.from(0) shouldBe None
      }
    }

    describe("should offer an ensuringValid factory method that") {
      it("returns NonZeroInt if the passed Int is greater than 0") {
        NonZeroInt.ensuringValid(50).value shouldBe 50
        NonZeroInt.ensuringValid(100).value shouldBe 100
      }
      it("returns NonZeroInt if the passed Int is lesser than 0") {
        NonZeroInt.ensuringValid(-1).value shouldBe -1
        NonZeroInt.ensuringValid(-99).value shouldBe -99
      }
      it("throws AssertionError if the passed Int is 0") {
        an [AssertionError] should be thrownBy NonZeroInt.ensuringValid(0)
      }
    }

    describe("should offer a tryingValid factory method that") {
      import TryValues._
      it("returns a NonZeroInt wrapped in a Success if the passed Int is non-zero") {
        NonZeroInt.tryingValid(50).success.value.value shouldBe 50
        NonZeroInt.tryingValid(100).success.value.value shouldBe 100
        NonZeroInt.tryingValid(-50).success.value.value shouldBe -50
        NonZeroInt.tryingValid(-100).success.value.value shouldBe -100
      }
      it("returns an AssertionError wrapped in a Failure if the passed Int is NOT non-zero") {
        NonZeroInt.tryingValid(0).failure.exception shouldBe an [AssertionError]
      }
    }

    describe("should offer a passOrElse factory method that") {
      it("returns a Pass if the given Int is non-zero") {
        NonZeroInt.passOrElse(50)(i => i) shouldBe Pass
        NonZeroInt.passOrElse(100)(i => i) shouldBe Pass
        NonZeroInt.passOrElse(-1)(i => i) shouldBe Pass
        NonZeroInt.passOrElse(-99)(i => i) shouldBe Pass
      }
      it("returns an error value produced by passing the given Int to the given function if the passed Int is NOT non-zero, wrapped in a Fail") {
        NonZeroInt.passOrElse(0)(i => s"$i did not taste good") shouldBe Fail("0 did not taste good")
      }
    }

    describe("should offer a goodOrElse factory method that") {
      it("returns a NonZeroInt wrapped in a Good if the given Int is non-zero") {
        NonZeroInt.goodOrElse(50)(i => i) shouldBe Good(NonZeroInt(50))
        NonZeroInt.goodOrElse(100)(i => i) shouldBe Good(NonZeroInt(100))
        NonZeroInt.goodOrElse(-1)(i => i) shouldBe Good(NonZeroInt(-1))
        NonZeroInt.goodOrElse(-99)(i => i) shouldBe Good(NonZeroInt(-99))
      }
      it("returns an error value produced by passing the given Int to the given function if the passed Int is NOT non-zero, wrapped in a Bad") {
        NonZeroInt.goodOrElse(0)(i => s"$i did not taste good") shouldBe Bad("0 did not taste good")
      }
    }

    describe("should offer a rightOrElse factory method that") {
      it("returns a NonZeroInt wrapped in a Right if the given Int is non-zero") {
        NonZeroInt.rightOrElse(50)(i => i) shouldBe Right(NonZeroInt(50))
        NonZeroInt.rightOrElse(100)(i => i) shouldBe Right(NonZeroInt(100))
        NonZeroInt.rightOrElse(-1)(i => i) shouldBe Right(NonZeroInt(-1))
        NonZeroInt.rightOrElse(-99)(i => i) shouldBe Right(NonZeroInt(-99))
      }
      it("returns an error value produced by passing the given Int to the given function if the passed Int is NOT non-zero, wrapped in a Left") {
        NonZeroInt.rightOrElse(0)(i => s"$i did not taste good") shouldBe Left("0 did not taste good")
      }
    }

    describe("should offer an isValid predicate method that") {
      it("returns true if the passed Int is not 0") {
        NonZeroInt.isValid(50) shouldBe true
        NonZeroInt.isValid(100) shouldBe true
        NonZeroInt.isValid(0) shouldBe false
        NonZeroInt.isValid(-0) shouldBe false
        NonZeroInt.isValid(-1) shouldBe true
        NonZeroInt.isValid(-99) shouldBe true
      }
    }

    describe("should offer a fromOrElse factory method that") {
      it("returns a NonZeroInt if the passed Int is greater than 0") {
        NonZeroInt.fromOrElse(50, NonZeroInt(42)).value shouldBe 50
        NonZeroInt.fromOrElse(100, NonZeroInt(42)).value shouldBe 100
      }
      it("returns a NonZeroInt if the passed Int is lesser than 0") {
        NonZeroInt.fromOrElse(-1, NonZeroInt(42)).value shouldBe -1
        NonZeroInt.fromOrElse(-99, NonZeroInt(42)).value shouldBe -99
      }
      it("returns a given default if the passed Int is 0") {
        NonZeroInt.fromOrElse(0, NonZeroInt(42)).value shouldBe 42
      }
    }

    it("should offer MaxValue and MinValue factory methods") {
      NonZeroInt.MaxValue shouldEqual NonZeroInt.from(Int.MaxValue).get
      // SKIP-DOTTY-START
      // not a literal
      NonZeroInt.MinValue shouldEqual NonZeroInt(Int.MinValue)
      // SKIP-DOTTY-END
    }

    it("should be sortable") {
      val xs = List(NonZeroInt(2), NonZeroInt(4), NonZeroInt(1), NonZeroInt(3))
      xs.sorted shouldEqual List(NonZeroInt(1), NonZeroInt(2), NonZeroInt(3), NonZeroInt(4))
    }

    describe("when created with apply method") {
      it("should compile when 8 is passed in") {
        "NonZeroInt(8)" should compile
        NonZeroInt(8).value shouldEqual 8
      }
      it("should not compile when 0 is passed in") {
        "NonZeroInt(0)" shouldNot compile
      }
      it("should compile when -8 is passed in") {
        "NonZeroInt(-8)" should compile
        NonZeroInt(-8).value shouldEqual -8
      }
      it("should not compile when x is passed in") {
        val x: Int = -8
        "NonZeroInt(x)" shouldNot compile
      }
    }

    describe("when specified as a plain-old Int") {
      def takesNonZeroInt(non0: NonZeroInt): Int = non0.value

      it("should compile when 8 is passed in") {
        "takesNonZeroInt(8)" should compile
        takesNonZeroInt(8) shouldEqual 8
      }
      it("should not compile when 0 is passed in") {
        "takesNonZeroInt(0)" shouldNot compile
      }
      it("should compile when -8 is passed in") {
        "takesNonZeroInt(-8)" should compile
        takesNonZeroInt(-8) shouldEqual -8
      }
      it("should not compile when x is passed in") {
        val x: Int = -8
        "takesNonZeroInt(x)" shouldNot compile
      }
    }

    it("should offer a unary ~ method that is consistent with Int") {
      forAll { (nzint: NonZeroInt) =>
        (~nzint) shouldEqual (~(nzint.toInt))
      }
    }

    it("should offer << methods that are consistent with Int") {
      forAll { (nzint: NonZeroInt, shift: Int) =>
        nzint << shift shouldEqual nzint.toInt << shift
      }
      forAll { (nzint: NonZeroInt, shift: Long) =>
        nzint << shift shouldEqual nzint.toInt << shift
      }
    }

    it("should offer >>> methods that are consistent with Int") {
      forAll { (nzint: NonZeroInt, shift: Int) =>
        nzint >>> shift shouldEqual nzint.toInt >>> shift
      }
      forAll { (nzint: NonZeroInt, shift: Long) =>
        nzint >>> shift shouldEqual nzint.toInt >>> shift
      }
    }

    it("should offer >> methods that are consistent with Int") {
      forAll { (nzint: NonZeroInt, shift: Int) =>
        nzint >> shift shouldEqual nzint.toInt >> shift
      }
      forAll { (nzint: NonZeroInt, shift: Long) =>
        nzint >> shift shouldEqual nzint.toInt >> shift
      }
    }

    it("should offer a '|' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt, byte: Byte) =>
        (nzint | byte) shouldEqual (nzint.toInt | byte)
      }
      forAll { (nzint: NonZeroInt, short: Short) =>
        (nzint | short) shouldEqual (nzint.toInt | short)
      }
      forAll { (nzint: NonZeroInt, char: Char) =>
        (nzint | char) shouldEqual (nzint.toInt | char)
      }
      forAll { (nzint: NonZeroInt, int: Int) =>
        (nzint | int) shouldEqual (nzint.toInt | int)
      }
      forAll { (nzint: NonZeroInt, long: Long) =>
        (nzint | long) shouldEqual (nzint.toInt | long)
      }
    }

    it("should offer an '&' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt, byte: Byte) =>
        (nzint & byte) shouldEqual (nzint.toInt & byte)
      }
      forAll { (nzint: NonZeroInt, short: Short) =>
        (nzint & short) shouldEqual (nzint.toInt & short)
      }
      forAll { (nzint: NonZeroInt, char: Char) =>
        (nzint & char) shouldEqual (nzint.toInt & char)
      }
      forAll { (nzint: NonZeroInt, int: Int) =>
        (nzint & int) shouldEqual (nzint.toInt & int)
      }
      forAll { (nzint: NonZeroInt, long: Long) =>
        (nzint & long) shouldEqual (nzint.toInt & long)
      }
    }

    it("should offer an '^' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt, byte: Byte) =>
        (nzint ^ byte) shouldEqual (nzint.toInt ^ byte)
      }
      forAll { (nzint: NonZeroInt, char: Char) =>
        (nzint ^ char) shouldEqual (nzint.toInt ^ char)
      }
      forAll { (nzint: NonZeroInt, short: Short) =>
        (nzint ^ short) shouldEqual (nzint.toInt ^ short)
      }
      forAll { (nzint: NonZeroInt, int: Int) =>
        (nzint ^ int) shouldEqual (nzint.toInt ^ int)
      }
      forAll { (nzint: NonZeroInt, long: Long) =>
        (nzint ^ long) shouldEqual (nzint.toInt ^ long)
      }
    }

    it("should offer a unary + method that is consistent with Int") {
      forAll { (p: NonZeroInt) =>
        (+p).toInt shouldEqual (+(p.toInt))
      }
    }

    it("should offer a unary - method that returns NonZeroInt") {
      forAll { (p: NonZeroInt) =>
        (-p) shouldEqual (NonZeroInt.ensuringValid(-(p.toInt)))
      }
    }

    it("should offer 'min' and 'max' methods that are consistent with Int") {
      forAll { (nzint1: NonZeroInt, nzint2: NonZeroInt) =>
        nzint1.max(nzint2).toInt shouldEqual nzint1.toInt.max(nzint2.toInt)
        nzint1.min(nzint2).toInt shouldEqual nzint1.toInt.min(nzint2.toInt)
      }
    }

    it("should offer a 'toBinaryString' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt) =>
        nzint.toBinaryString shouldEqual nzint.toInt.toBinaryString
      }
    }

    it("should offer a 'toHexString' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt) =>
        nzint.toHexString shouldEqual nzint.toInt.toHexString
      }
    }

    it("should offer a 'toOctalString' method that is consistent with Int") {
      forAll { (nzint: NonZeroInt) =>
        nzint.toOctalString shouldEqual nzint.toInt.toOctalString
      }
    }

    it("should offer 'to' and 'until' methods that are consistent with Int") {
      forAll { (nzint: NonZeroInt, end: Int, step: Int) =>
        // The reason we need this is that in Scala 2.10, the equals check (used by shouldEqual below) will call range.length
        // and it'll cause IllegalArgumentException to be thrown when we do the Try(x) shouldEqual Try(y) assertion below,
        // while starting from Scala 2.11 the equals implementation does not call .length.
        // To make the behavior consistent for all Scala versions, we explicitly call .length for every returned Range;
        // should it throw IllegalArgumentException, the Success will be turned into a Failure for the Try.
        def ensuringValid(range: Range): Range = {
          range.length // IllegalArgumentException will be thrown if it is an invalid range; this turns the Success into a Failure for Try
          range
        }

        Try(ensuringValid(nzint.to(end))) shouldEqual Try(ensuringValid(nzint.toInt.to(end)))
        Try(ensuringValid(nzint.to(end, step))) shouldEqual Try(ensuringValid(nzint.toInt.to(end, step)))
        Try(ensuringValid(nzint.until(end))) shouldEqual Try(ensuringValid(nzint.toInt.until(end)))
        Try(ensuringValid(nzint.until(end, step))) shouldEqual Try(ensuringValid(nzint.toInt.until(end, step)))
      }
    }

    it("should offer an ensuringValid method that takes an Int => Int, throwing AssertionError if the result is invalid") {
      NonZeroInt(33).ensuringValid(_ + 1) shouldEqual NonZeroInt(34)
      an [AssertionError] should be thrownBy { NonZeroInt(-1).ensuringValid(_ + 1) }
    }

    it("should provide an Ordering that works for both negative and positive values") {
      NonZeroInt(-1924396667) should be <= NonZeroInt(1081481977)
    }
  }
}
dotty-staging/scalatest
scalactic-test/src/test/scala/org/scalactic/anyvals/NonZeroIntSpec.scala
Scala
apache-2.0
14,828
package org.apache.spark.streaming.talos.util

object Utils {
  def md5(s: String): String = {
    val m = java.security.MessageDigest.getInstance("MD5")
    val b = s.getBytes("UTF-8")
    m.update(b, 0, b.length)
    new java.math.BigInteger(1, m.digest()).toString(16)
  }
}
XiaoMi/galaxy-sdk-java
galaxy-talos-client/galaxy-talos-spark/src/main/scala/org/apache/spark/streaming/talos/util/Utils.scala
Scala
apache-2.0
280
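One caveat about the md5 helper above: BigInteger.toString(16) drops leading zeros, so a digest whose first bytes are zero comes out shorter than 32 characters. If fixed-width output matters, a padded variant can be written like this (md5Padded is a hypothetical name, not part of the original Utils):

def md5Padded(s: String): String = {
  val m = java.security.MessageDigest.getInstance("MD5")
  // "%032x" left-pads the hex digest with zeros to a fixed 32 characters
  String.format("%032x", new java.math.BigInteger(1, m.digest(s.getBytes("UTF-8"))))
}

// md5Padded("hello") == "5d41402abc4b2a76b9719d911017c592"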
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ly.stealth.mesos.kafka

import java.util

import scala.util.parsing.json.{JSONArray, JSONObject}
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ListBuffer

import java.util.Collections
import java.io.{FileWriter, File}

import org.I0Itec.zkclient.ZkClient
import kafka.utils.ZKStringSerializer
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import net.elodina.mesos.util.Version

class Cluster {
  private val brokers: util.List[Broker] = new util.concurrent.CopyOnWriteArrayList[Broker]()

  private[kafka] var rebalancer: Rebalancer = new Rebalancer()
  private[kafka] var topics: Topics = new Topics()
  private[kafka] var frameworkId: String = null

  def getBrokers: util.List[Broker] = Collections.unmodifiableList(brokers)

  def getBroker(id: String): Broker = {
    for (broker <- brokers)
      if (broker.id == id) return broker
    null
  }

  def addBroker(broker: Broker): Broker = {
    brokers.add(broker)
    broker
  }

  def removeBroker(broker: Broker): Unit = brokers.remove(broker)

  def clear(): Unit = brokers.clear()

  def load() = Cluster.storage.load(this)
  def save() = Cluster.storage.save(this)

  def fromJson(root: Map[String, Object]): Unit = {
    if (root.contains("brokers")) {
      for (brokerNode <- root("brokers").asInstanceOf[List[Map[String, Object]]]) {
        val broker: Broker = new Broker()
        broker.fromJson(brokerNode)
        brokers.add(broker)
      }
    }

    if (root.contains("frameworkId"))
      frameworkId = root("frameworkId").asInstanceOf[String]
  }

  def toJson: JSONObject = {
    val obj = new mutable.LinkedHashMap[String, Object]()
    obj("version") = "" + Scheduler.version

    if (!brokers.isEmpty) {
      val brokerNodes = new ListBuffer[JSONObject]()
      for (broker <- brokers)
        brokerNodes.add(broker.toJson(false))
      obj("brokers") = new JSONArray(brokerNodes.toList)
    }

    if (frameworkId != null)
      obj("frameworkId") = frameworkId

    new JSONObject(obj.toMap)
  }
}

object Cluster {
  var storage: Storage = newStorage(Config.storage)

  def newStorage(s: String): Storage = {
    if (s.startsWith("file:")) return new FsStorage(new File(s.substring("file:".length)))
    else if (s.startsWith("zk:")) return new ZkStorage(s.substring("zk:".length))
    throw new IllegalStateException("Unsupported storage " + s)
  }

  abstract class Storage {
    def load(cluster: Cluster): Unit = {
      val json: String = loadJson
      if (json == null) return

      var node: Map[String, Object] = Util.parseJson(json)
      val fromVersion: Version = new Version(if (node.contains("version")) node("version").asInstanceOf[String] else "0.9.5.0")
      node = Migration.apply(fromVersion, Scheduler.version, node)

      cluster.brokers.clear()
      cluster.fromJson(node)

      save(cluster)
    }

    def save(cluster: Cluster): Unit = {
      saveJson("" + cluster.toJson)
    }

    protected def loadJson: String
    protected def saveJson(json: String): Unit
  }

  class FsStorage(val file: File) extends Storage {
    protected def loadJson: String = {
      if (!file.exists) return null
      val source = scala.io.Source.fromFile(file)
      try source.mkString finally source.close()
    }

    protected def saveJson(json: String): Unit = {
      val writer = new FileWriter(file)
      try { writer.write(json) }
      finally { writer.close() }
    }
  }

  object FsStorage {
    val DEFAULT_FILE: File = new File("kafka-mesos.json")
  }

  class ZkStorage(val path: String) extends Storage {
    createChrootIfRequired()
    val zkClient = new ZkClient(Config.zk, 30000, 30000, ZKStringSerializer)

    private def createChrootIfRequired(): Unit = {
      val slashIdx: Int = Config.zk.indexOf('/')
      if (slashIdx == -1) return

      val chroot = Config.zk.substring(slashIdx)
      val zkConnect = Config.zk.substring(0, slashIdx)

      val client = new ZkClient(zkConnect, 30000, 30000, ZKStringSerializer)
      try { client.createPersistent(chroot, true) }
      finally { client.close() }
    }

    protected def loadJson: String = {
      zkClient.readData(path, true).asInstanceOf[String]
    }

    protected def saveJson(json: String): Unit = {
      if (zkClient.exists(path)) {
        zkClient.writeData(path, json)
      } else {
        try {
          zkClient.createPersistent(path, json)
        } catch {
          case e: ZkNodeExistsException => zkClient.writeData(path, json)
        }
      }
    }
  }
}
vidhyaarvind/kafka-mesos
src/scala/ly/stealth/mesos/kafka/Cluster.scala
Scala
apache-2.0
5,328
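The `newStorage` factory in the record above dispatches on a `file:`/`zk:` prefix in the configured storage string. A minimal standalone sketch of the same prefix-dispatch pattern; the two `Storage` cases here are illustrative stand-ins, not the kafka-mesos classes:

object StorageFactorySketch extends App {
  sealed trait Storage
  case class FsStorage(file: java.io.File) extends Storage
  case class ZkStorage(path: String) extends Storage

  // dispatch on the scheme prefix, mirroring Cluster.newStorage
  def newStorage(s: String): Storage =
    if (s.startsWith("file:")) FsStorage(new java.io.File(s.substring("file:".length)))
    else if (s.startsWith("zk:")) ZkStorage(s.substring("zk:".length))
    else throw new IllegalStateException("Unsupported storage " + s)

  println(newStorage("file:kafka-mesos.json"))   // FsStorage(kafka-mesos.json)
  println(newStorage("zk:/mesos-kafka/cluster")) // ZkStorage(/mesos-kafka/cluster)
}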
package de.fosd.typechef.crefactor.backend.codeselection

import de.fosd.typechef.crefactor.Morpheus
import de.fosd.typechef.crefactor.frontend.util.CodeSelection
import de.fosd.typechef.parser.c._
import java.util.Collections
import scala.collection.JavaConversions._
import scala.Some
import de.fosd.typechef.crefactor.backend.RefactorException

object CExtractSelection extends ASTSelection {

  def getSelectedElements(morpheus: Morpheus, selection: CodeSelection): List[AST] = {
    val ids = filterASTElementsForFile[Id](
      filterASTElems[Id](morpheus.getTranslationUnit).par.filter(x => isPartOfSelection(x, selection)).toList,
      selection.getFilePath)

    // this function tries to find the outermost statement of a selection, for example:
    // if (1) {
    //   i++;
    // }
    // in case the whole if statement is selected, we don't want to add the i++ statement to our selection list,
    // as it is already part of the if statement
    def exploitStatement(stmt: Statement): Statement = {
      try {
        parentAST(stmt, morpheus.getASTEnv) match {
          case null => throw new RefactorException("No proper selection for extract function.")
          case _: FunctionDef => stmt
          case _: NestedFunctionDef => stmt
          case p =>
            if (isElementOfSelection(p, selection)) {
              exploitStatement(p.asInstanceOf[Statement])
            } else stmt
        }
      } catch {
        case _: Throwable => stmt
      }
    }

    // TODO @ajanker: I don't get the purpose of this function?
    // if a control statement was hit, we check whether the possibly embedded statements
    // that follow it are also part of the selection.
    def lookupControlStatements(stmt: Statement): Statement = {
      nextAST(stmt, morpheus.getASTEnv) match {
        case ns @ (ContinueStatement() | BreakStatement() | CaseStatement(_)
          | GotoStatement(_) | ReturnStatement(_)) =>
          if (isElementOfSelection(ns, selection)) ns.asInstanceOf[Statement]
          else stmt
        case _ => stmt
      }
    }

    val uniqueSelectedStatements = Collections.newSetFromMap[Statement](new java.util.IdentityHashMap())
    val uniqueSelectedExpressions = Collections.newSetFromMap[Expr](new java.util.IdentityHashMap())

    ids.foreach(id => {
      val parent = findPriorASTElem[Statement](id, morpheus.getASTEnv)
      parent match {
        case Some(stmt) =>
          stmt.setPositionRange(id)
          uniqueSelectedStatements.add(stmt)
          uniqueSelectedStatements.add(lookupControlStatements(stmt))
        case None => // logger.info("There may have been an expression!")
      }
    })

    val selectedElements = {
      if (!uniqueSelectedStatements.isEmpty) {
        val parents = uniqueSelectedStatements.toList
        uniqueSelectedStatements.clear()
        parents.foreach(statement => {
          val exploitedStatement = exploitStatement(statement)
          uniqueSelectedStatements.add(exploitedStatement)
        })
        uniqueSelectedStatements
      } else uniqueSelectedExpressions
    }

    val selected = selectedElements.toList.sortWith(comparePosition)
    logger.info("ExtractFuncSelection: " + selected)
    selected
  }

  def getAvailableIdentifiers(morpheus: Morpheus, selection: CodeSelection): List[Id] =
    getSelectedElements(morpheus, selection).isEmpty match {
      case true => null
      case false => List[Id]() // returns an empty list to signal that a valid selection was found
    }
}
joliebig/Morpheus
src/main/scala/de/fosd/typechef/crefactor/backend/codeselection/CExtractSelection.scala
Scala
lgpl-3.0
4,009
/*******************************************************************************
 * Copyright (c) 2019. Carl Minden
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/

package com.anathema_roguelike
package entities.characters.inventory

import com.anathema_roguelike.entities.items.weapons.Weapon
import com.anathema_roguelike.entities.items.weapons.natural_weapons.Unarmed
import com.anathema_roguelike.entities.characters.Character

class PrimaryWeapon(character: Character) extends SingleSlot[Weapon](character) {
  override protected def getDefaultItem = new Unarmed()
}
carlminden/anathema-roguelike
src/com/anathema_roguelike/entities/characters/inventory/PrimaryWeapon.scala
Scala
gpl-3.0
1,254
package com.github.vooolll.services

import akka.http.scaladsl.model.StatusCodes
import com.github.vooolll.base.{AsyncResourceSpec, TestUrls}

class AsyncRequestServiceSpec extends AsyncResourceSpec {

  val asyncRequest = AsyncRequest()

  "Should send GET request" in {
    val responseContext = asyncRequest(TestUrls.appTokenUri)
    responseContext.cleanResources()
    responseContext.response.map(_.status shouldBe StatusCodes.OK)
  }

  "Should send POST request" in {
    val responseContext = asyncRequest.post(TestUrls.appTokenUri, Map("test" -> "value"))
    responseContext.cleanResources()
    responseContext.response.map(_.status shouldBe StatusCodes.OK)
  }
}
vooolll/facebook4s
src/test/scala/com/github/vooolll/services/AsyncRequestServiceSpec.scala
Scala
apache-2.0
682
package lila.video

import play.api.libs.ws.StandaloneWSClient
import play.api.Mode
import com.softwaremill.macwire._
import io.methvin.play.autoconfig._
import play.api.Configuration
import scala.concurrent.duration._

import lila.common.config._

@Module
private class VideoConfig(
    @ConfigName("collection.video") val videoColl: CollName,
    @ConfigName("collection.view") val viewColl: CollName,
    @ConfigName("sheet.url") val sheetUrl: String,
    @ConfigName("sheet.delay") val sheetDelay: FiniteDuration,
    @ConfigName("youtube.url") val youtubeUrl: String,
    @ConfigName("youtube.api_key") val youtubeApiKey: Secret,
    @ConfigName("youtube.max") val youtubeMax: Max,
    @ConfigName("youtube.delay") val youtubeDelay: FiniteDuration
)

final class Env(
    appConfig: Configuration,
    ws: StandaloneWSClient,
    scheduler: akka.actor.Scheduler,
    db: lila.db.Db,
    cacheApi: lila.memo.CacheApi,
    mode: Mode
)(implicit ec: scala.concurrent.ExecutionContext) {

  private val config = appConfig.get[VideoConfig]("video")(AutoConfig.loader)

  lazy val api = new VideoApi(
    cacheApi = cacheApi,
    videoColl = db(config.videoColl),
    viewColl = db(config.viewColl)
  )

  private lazy val sheet = new VideoSheet(ws, config.sheetUrl, api)

  private lazy val youtube = new Youtube(
    ws = ws,
    url = config.youtubeUrl,
    apiKey = config.youtubeApiKey,
    max = config.youtubeMax,
    api = api
  )

  def cli = new lila.common.Cli {
    def process = { case "video" :: "sheet" :: Nil =>
      sheet.fetchAll map { nb => s"Processed $nb videos" }
    }
  }

  if (mode == Mode.Prod) {
    scheduler.scheduleWithFixedDelay(config.sheetDelay, config.sheetDelay) { () =>
      sheet.fetchAll.logFailure(logger).unit
    }
    scheduler.scheduleWithFixedDelay(config.youtubeDelay, config.youtubeDelay) { () =>
      youtube.updateAll.logFailure(logger).unit
    }
  }
}
luanlv/lila
modules/video/src/main/Env.scala
Scala
mit
1,918
package org.scalamu.plugin

import org.scalamu.common.filtering.{InverseRegexFilter, NameFilter}
import org.scalamu.plugin.fixtures.IsolatedScalamuCompilerFixture
import org.scalamu.plugin.mutators.controllflow.{NegateConditionals, NeverExecuteConditionals, ReplaceCaseWithWildcard}
import org.scalamu.plugin.mutators.methodcalls.ReplaceWithIdentityFunction
import org.scalamu.plugin.testutil.MutationTestRunner

class ScalamuPluginSpec extends MutationTestRunner with IsolatedScalamuCompilerFixture {
  override val guard: MutationGuard = FqnGuard(
    s"${ScalamuPluginConfig.mutationGuardPrefix}.FooGuard.enabledMutation"
  )
  override val mutators: Seq[Mutator] = ScalamuPluginConfig.allMutators
  override val sanitizeTrees: Boolean = true
  override val verifyTrees: Boolean = true
  override val filter: NameFilter = InverseRegexFilter(".*ignored.*".r)

  private val guards =
    s"""
       |package ${ScalamuPluginConfig.mutationGuardPrefix}
       |
       |object FooGuard {
       |  def enabledMutation: Int = 1
       |}
    """.stripMargin

  "ScalamuPlugin" should "insert all possible mutants" in withScalamuCompiler { (global, reporter) =>
    val code =
      """
        |object Foo {
        |  val xs = List(1, 2, 3)
        |  val length = xs match {
        |    case _ if xs.isEmpty => 0
        |    case named @ _ => -10
        |  }
        |
        |  if (xs.forall(_ + 1 > 0)) {
        |    println("length = " + length)
        |  }
        |
        |  val x = 123
        |  val y = 456d
        |
        |  val z = (x * y + x / y) - -y
        |
        |  (1 until 100 by 10).map(x => List(x))
        |
        |  type Maybe[T] = Option[T]
        |  def toOption[T](x: T): Maybe[T] = Option(x)
        |
        |  val t = true
        |  val f = t && false
        |
        |  def ignored(): Unit = println("I am ignored")
        |
        |  (xs.toSet | Set(4)).foreach(x => ignored())
        |}
      """.stripMargin
    compile(NamedSnippet("Guards.scala", guards))(global)
    val mutantsInfo = mutantsFor(NamedSnippet("Foo.scala", code))(global, reporter)
    mutantsInfo should have size 41
  }

  it should "ignore macro bodies" in withScalamuCompiler { (global, reporter) =>
    val code =
      """
        |object Macro {
        |
        |  import scala.language.experimental.macros
        |  import scala.reflect.macros.blackbox
        |
        |  def test: Int = macro testImpl
        |
        |  def testImpl(c: blackbox.Context): c.Tree = {
        |    import c.universe._
        |    val a = 10
        |    val expr = if (a >= 1) a * -100 else -a
        |    q"$expr"
        |  }
        |}
      """.stripMargin
    val mutantsInfo = mutantsFor(NamedSnippet("Macro.scala", code))(global, reporter)
    mutantsInfo shouldBe empty
  }

  it should "ignore macro expansions" in withScalamuCompiler { (global, reporter) =>
    val macroDef =
      """
        |object Macro {
        |
        |  import scala.language.experimental.macros
        |  import scala.reflect.macros.blackbox
        |
        |  def test(cond: Boolean): Int = macro testImpl
        |
        |  def testImpl(c: blackbox.Context)(cond: c.Expr[Boolean]): c.Tree = {
        |    import c.universe._
        |    q"123 + (if ($cond) -10 else 71)"
        |  }
        |}
      """.stripMargin
    val code =
      """
        |object Foo {
        |  import Macro._
        |  val a = test(1 > 10)
        |}
      """.stripMargin
    compile(
      NamedSnippet("Macro.scala", macroDef),
      NamedSnippet("Guards.scala", guards)
    )(global)
    val mutantsInfo = mutantsFor(NamedSnippet("Foo.scala", code))(global, reporter)
    mutantsInfo shouldBe empty
  }

  it should "work when arrays are involved" in withPluginConfig { cfg =>
    withScalamuCompiler(Seq(NegateConditionals), cfg) { (global, _) =>
      val code =
        """
          |object Foo {
          |  val a = 10
          |
          |  def foo(): Unit = {
          |    val lengths = Array(10)
          |    if (lengths.apply(10) == 10) {
          |      ???
          |    }
          |  }
          |}
        """.stripMargin
      compile(
        NamedSnippet("Guards.scala", guards),
        NamedSnippet("Foo.scala", code)
      )(global)
    }
  }

  it should "test case where NeverExecuteConditionals fail on LambdaLift" in withPluginConfig { cfg =>
    withScalamuCompiler(Seq(NeverExecuteConditionals), cfg) { (global, _) =>
      val code =
        """
          |object Foo {
          |  type Occurrence = (Int, Int)
          |  private case class Stacked(idx1: Int, idx2: Int, next: Option[Stacked]) {
          |    lazy val chain: List[(Int, Int)] = ???
          |  }
          |
          |  val l: List[(Occurrence, Int)] = ???
          |  if (l.length > 0) {
          |    Nil
          |  } else {
          |    def sort(l: List[(Occurrence, Int)]): List[List[Stacked]] =
          |      l.foldLeft(List[List[Stacked]]()) {
          |        case (acc, ((_, idx1), idx2)) => acc
          |      }
          |    Nil
          |  }
          |}
        """.stripMargin
      compile(
        NamedSnippet("Guards.scala", guards),
        NamedSnippet("Foo.scala", code)
      )(global)
    }
  }

  it should "correctly work when splicing original tree changes its owner" in withPluginConfig { cfg =>
    withScalamuCompiler(Seq(ReplaceWithIdentityFunction), cfg) { (global, _) =>
      val code =
        """
          |object Foo {
          |  Some(1).filter { v => val f = v; v > 0 }.filter { v => val f = v; v > 0 }.filter { v => val f = v; v > 0 }
          |}
        """.stripMargin
      compile(
        NamedSnippet("Guards.scala", guards),
        NamedSnippet("Foo.scala", code)
      )(global)
    }
  }

  it should "not fail when using GuardedMutant.unapply" in withScalamuCompiler { (global, _) =>
    val code =
      """
        |object Foo {
        |  val x = 1
        |  if (1 == x) {
        |    println(123)
        |  }
        |}
      """.stripMargin
    compile(
      NamedSnippet("Guards.scala", guards),
      NamedSnippet("Foo.scala", code)
    )(global)
  }

  it should "not mutate case patterns" in withScalamuCompiler { (global, _) =>
    val code =
      """
        |object Foo {
        |  val x = 123
        |
        |  object Bar { def unapply(b: Int): Option[Boolean] = Some(true) }
        |
        |  val y = x match {
        |    case -1 => 1
        |    case _ => 2
        |  }
        |}
      """.stripMargin
    compile(
      NamedSnippet("Guards.scala", guards),
      NamedSnippet("Foo.scala", code)
    )(global)
  }

  it should "work with anonymous classes" in withPluginConfig { cfg =>
    withScalamuCompiler(Seq(ReplaceCaseWithWildcard), cfg) { (global, r) =>
      val code =
        """
          |object Foo {
          |  val x = 123
          |
          |  x match {
          |    case 0 => ???
          |    case _ => val v = new Runnable { def run(): Unit = println(123) }
          |  }
          |}
        """.stripMargin
      compile(
        NamedSnippet("Guards.scala", guards),
        NamedSnippet("Foo.scala", code)
      )(global)
    }
  }
}
sugakandrey/scalamu
scalac-plugin/src/test/scala/org/scalamu/plugin/ScalamuPluginSpec.scala
Scala
gpl-3.0
7,197
package demo

import java.util.concurrent.Executors

import doobie.imports._
import io.circe._
import io.circe.generic.auto._
import io.circe.java8.time._
import io.circe.parser._
import io.circe.syntax._
import org.http4s._
import org.http4s.circe._
import org.http4s.dsl._

import scalaz.concurrent.Task
import scalaz.stream.Process

object DemoService {
  val dbExecutor: java.util.concurrent.ExecutorService = Executors.newFixedThreadPool(64)

  implicit def circeJsonDecoder[A](implicit decoder: Decoder[A]) = org.http4s.circe.jsonOf[A]
  implicit def circeJsonEncoder[A](implicit encoder: Encoder[A]) = org.http4s.circe.jsonEncoderOf[A]

  def service(xa: Transactor[Task]) = HttpService {
    case GET -> Root / "stream" =>
      Ok(PersonDAO.streamPeople.transact(xa).map(p => p.id + "\n"))

    case GET -> Root / "people" =>
      Ok(PersonDAO.listPeople.transact(xa))

    case GET -> Root / "people" / IntVar(id) =>
      for {
        person <- PersonDAO.getPerson(id).transact(xa)
        result <- person.fold(NotFound())(Ok(_))
      } yield result

    case req @ PUT -> Root / "people" / IntVar(id) =>
      req.decode[PersonForm] { form =>
        Ok(PersonDAO.updatePerson(id, form.firstName, form.familyName).transact(xa))
      }

    case req @ POST -> Root / "people" =>
      req.decode[PersonForm] { form =>
        Ok(PersonDAO.insertPerson(form.firstName, form.familyName).transact(xa))
      }
  }.mapK(Task.fork(_)(dbExecutor))
}
fiadliel/http4s-talk
src/main/scala/DemoService.scala
Scala
mit
1,459
package com.scalaAsm.x86
package Instructions
package General

// Description: Conditional Move - less/not greater (SF!=OF)
// Category: general/datamov

trait CMOVL extends InstructionDefinition {
  val mnemonic = "CMOVL"
}

object CMOVL extends TwoOperands[CMOVL] with CMOVLImpl

trait CMOVLImpl extends CMOVL {
  implicit object _0 extends TwoOp[r16, rm16] {
    val opcode: TwoOpcodes = (0x0F, 0x4C) /r
    val format = RegRmFormat
  }

  implicit object _1 extends TwoOp[r32, rm32] {
    val opcode: TwoOpcodes = (0x0F, 0x4C) /r
    val format = RegRmFormat
  }

  implicit object _2 extends TwoOp[r64, rm64] {
    val opcode: TwoOpcodes = (0x0F, 0x4C) /r
    override def prefix = REX.W(true)
    val format = RegRmFormat
  }
}
bdwashbu/scala-x86-inst
src/main/scala/com/scalaAsm/x86/Instructions/General/CMOVL.scala
Scala
apache-2.0
734
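The record above registers one implicit `TwoOp` instance per operand-width variant, so the operand types select the encoding at compile time. A standalone sketch of that implicit-instance dispatch idiom; all names and the simplified opcode representation are assumptions, not the scala-x86-inst API:

object OpcodeDispatchSketch extends App {
  // hypothetical operand markers standing in for r16/rm16 and r32/rm32
  final class R16; final class RM16
  final class R32; final class RM32

  // one implicit instance per legal operand combination, as in the record
  trait TwoOp[O1, O2] { def opcode: (Int, Int) }

  implicit object Cmovl16 extends TwoOp[R16, RM16] { val opcode = (0x0F, 0x4C) }
  implicit object Cmovl32 extends TwoOp[R32, RM32] { val opcode = (0x0F, 0x4C) }

  // the compiler selects the encoding from the operand types
  def encode[O1, O2](o1: O1, o2: O2)(implicit op: TwoOp[O1, O2]): String =
    f"${op.opcode._1}%02X ${op.opcode._2}%02X"

  println(encode(new R16, new RM16)) // 0F 4C
}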
package org.cloudio.morpheus.dci.gambling.whist4.uses

import org.morpheus._
import org.morpheus.Morpheus._
import org.cloudio.morpheus.dci.gambling.whist4.data._
import org.cloudio.morpheus.dci.gambling.objects._

import scala.util.Random

/**
 *
 * Created by zslajchrt on 23/06/15.
 */

// objective roles

@fragment
trait Player {
  protected var holding: List[Card] = Nil
  protected var tricks = List.empty[List[Card]]

  def trickCount = tricks.length
}

@fragment
trait PlayerFaceForRound {
  this: Person with Player with PlayerScene with PlayerLogic =>

  def reset(): Unit = {
    holding = Nil
    tricks = Nil
  }

  def addTrick(trick: List[Card]): Unit = {
    tricks ::= trick
  }

  def playCard(trick: List[Card]): Card = {
    val maybeLastTrick: Option[List[Card]] = for (ltw <- lastTrickWinner) yield ltw.showLastTrick
    var selectedCard = selectCard(maybeLastTrick)

    // remove the selected card from the deck
    removeCard(selectedCard)

    // if the selected card is not the highest in the trick then
    // ask the partner to swap the selected card for the partner's best corresponding one.
    //val ord: CardOrdering = roundScene.cardOrd
    val ord: CardOrdering = CardOrdering
    if (!trick.forall(card => ord.compare(card, selectedCard) > 0)) {
      selectedCard = partner.swapCards(selectedCard) match {
        case None => selectedCard
        case Some(swapped) => swapped
      }
    }

    selectedCard
  }

  private def removeCard(card: Card): Unit = {
    val selectedCardIndex = holding.indexOf(card)
    val splitCards = holding.splitAt(selectedCardIndex)
    holding = splitCards._1 ::: splitCards._2.tail
  }

  def won(): Unit = {
    addWin()
  }

  def lost(): Unit = {
    addLoss()
  }
}

@dimension
trait PlayerLogic {
  def selectCard(lastTrick: Option[List[Card]]): Card
}

@fragment
trait RobotPlayerLogic extends PlayerLogic {
  this: Player =>

  override def selectCard(lastTrick: Option[List[Card]]): Card = {
    // todo
    holding.head
  }
}

@fragment
trait LivePlayerLogic extends PlayerLogic {
  this: Player =>

  override def selectCard(lastTrick: Option[List[Card]]): Card = {
    // todo
    holding.head
  }
}

@fragment
trait Dealer {
  this: DealerScene =>

  def deal(cards: List[Card]): Unit = {
    for (cardNum <- 0 until cards.size;
         cardHolder = cardHolders(cardNum % 4);
         card = cards(cardNum)) {
      cardHolder.receiveCard(card)
    }
  }
}

@fragment
trait CardReceiver {
  this: Player =>

  def receiveCard(card: Card): Unit = {
    holding ::= card
  }
}

@fragment
trait LastTrickWinner {
  this: Player =>

  def showLastTrick: List[Card] = {
    tricks.head
  }
}

// subjective roles

@fragment
trait PartnerFace {
  this: Player with PlayerScene =>

  def swapCards(card: Card): Option[Card] = {
    //val ord: CardOrdering = roundScene.cardOrd
    val ord: CardOrdering = CardOrdering
    holding.find(myCard => {
      ord.compare(card, myCard) > 0
    }) match {
      case None => None
      case c => c
    }
  }
}

@fragment
trait OpponentFace {
}

@fragment
trait Round {
  this: RoundScene =>

  def start(): Either[Int, Int] = {
    for (p <- players) {
      p.reset()
    }

    val cards1 = shuffle(Card.deck)
    //cards1 = dealer.nextPlayer.shuffle(cards1) // opt.
    //cards1 = dealer.shuffle(cards1) // opt.
    // let the dealer's partner shuffle the second deck of cards
    //cards2 = cardHolders((cardHolders.indexOf(dealer) + 2) % 4).shuffle(cards2)

    dealer.deal(cards1)

    for (trickNum <- 0 to 12) {
      var trick = List.empty[Card]
      for (playerNum <- trickNum to (trickNum + 3);
           playerNumMod = playerNum % 4) {
        trick ::= validatePlayedCard(players(playerNumMod).playCard(trick))
      }

      val winnerNum = (trickNum + trick.indexOf(trick.max(new CardOrdering(trumpSuite)))) % 4
      val winner = players(winnerNum)
      winner.addTrick(trick)
    }

    val leftScore = players(0).trickCount + players(2).trickCount
    val rightScore = players(1).trickCount + players(3).trickCount

    if (leftScore > rightScore) {
      players(0).won()
      players(2).won()
      players(1).lost()
      players(3).lost()
      Left(leftScore - 6)
    } else {
      players(0).lost()
      players(2).lost()
      players(1).won()
      players(3).won()
      Right(rightScore - 6)
    }

    //Left(0)
  }

  private def validatePlayedCard(card: Card) = {
    // todo:
    card
  }

  private def shuffle(deck: List[Card]): List[Card] = {
    val rnd = new Random
    // a possible bug in Scala/Java7: this does not work under jdk1.7.0_80
    //deck.sortBy(c => rnd.nextInt())
    deck.map(c => (c, rnd.nextInt())).sortWith(_._2 < _._2).map(_._1)
  }
}

trait PlayerScene {
  def roundScene: RoundScene
  def partner: Player with PartnerFace
  def leftOpponent: Player with OpponentFace
  def rightOpponent: Player with OpponentFace
  def lastTrickWinner: Option[Player with LastTrickWinner]
}

trait DealerScene {
  def cardHolders: IndexedSeq[Player with CardReceiver]
}

trait RoundScene {
  def dealer: Player with PlayerFaceForRound with Dealer
  def players: IndexedSeq[Player with PlayerFaceForRound]
  val trumpSuite: Int

  lazy val cardOrd = new CardOrdering(trumpSuite)
}

//object RoundContext {
//
//  type PlayerMorphType = Person with Player with PlayerScene with
//    (LivePlayerLogic or RobotPlayerLogic) with
//    (OpponentFace or PartnerFace) with
//    \?[Dealer with DealerScene] with
//    \?[CardReceiver] with
//    \?[LastTrickWinner] with
//    \?[PlayerFaceForRound]
//  val playerMorphModel = parse[PlayerMorphType](true)
//
//
//  def main(args: Array[String]): Unit = {
//    val r = round()
//    val ts = System.currentTimeMillis()
//    for (i <- 0 until 100) {
//    //while (true) {
//      //val r = round()
//      val res = r.start()
//      //println(res)
//    }
//    val ts2 = System.currentTimeMillis()
//    println(s"Finished in ${ts2 - ts} msec")
//  }
//
//  def round() = {
//    val group = (0 to 3).map(i => new DefaultPerson(s"Player$i", Score(0, 0)))
//    new RoundContext(group, 0, None, 0)
//    //new RoundContextTest(group, 0, None, 0)
//  }
//}
//
//class RoundContext(group: IndexedSeq[Person], dealerNum: Int, maybeLastTrickWinnerNum: Option[Int], gameTrumpSuite: Int) {
//
//  import RoundContext._
//
//  private def newPlayerKernel(playerNum: Int): playerMorphModel.Kernel = {
//    implicit val personFrag = external[Person](group(playerNum))
//    implicit val playerSceneFrag = external[PlayerScene](new PlayerSceneImpl(playerNum))
//    implicit val dealerSceneFrag = external[DealerScene](new DealerSceneImpl)
//
//    singleton(playerMorphModel, rootStrategy(playerMorphModel))
//  }
//
//  val playerKernels = {
//    (0 until group.size).map(newPlayerKernel)
//  }
//
//  object RoundScene extends RoundScene {
//    lazy val dealer = asMorphOf[Player with PlayerFaceForRound with Dealer](playerKernels(dealerNum))
//
//    lazy val players = playerKernels.map(asMorphOf[Player with PlayerFaceForRound](_))
//
//    override val trumpSuite: Int = gameTrumpSuite
//  }
//
//  class PlayerSceneImpl(playerNum: Int) extends PlayerScene {
//    val roundScene = RoundScene
//    lazy val partner = asMorphOf[Player with PartnerFace](playerKernels(playerNum))
//    lazy val leftOpponent = asMorphOf[Player with OpponentFace](playerKernels((playerNum + 1) % 4))
//    lazy val rightOpponent = asMorphOf[Player with OpponentFace](playerKernels((playerNum + 2) % 4))
//    lazy val lastTrickWinner = maybeLastTrickWinnerNum match {
//      case None => None
//      case Some(lastTrickWinnerNum) => Some(asMorphOf[Player with LastTrickWinner](playerKernels(lastTrickWinnerNum)))
//    }
//    val trumpSuite: Int = 0 // todo
//  }
//
//  class DealerSceneImpl extends DealerScene {
//    lazy val cardHolders = playerKernels.map(asMorphOf[Player with CardReceiver](_))
//  }
//
//  val round = {
//    implicit val roundSceneFrag = external[RoundScene](RoundScene)
//    singleton[Round with RoundScene].!
//  }
//
//  def start(): Either[Int, Int] = {
//    round.start()
//  }
//
//}
zslajchrt/morpheus-tutor
src/main/scala/org/cloudio/morpheus/dci/gambling/whist4/uses/Whist.scala
Scala
apache-2.0
8,133
def map[B](f: A => B): Either[E, B] = this match {
  case Right(a) => Right(f(a))
  case Left(e) => Left(e)
}

def flatMap[EE >: E, B](f: A => Either[EE, B]): Either[EE, B] = this match {
  case Left(e) => Left(e)
  case Right(a) => f(a)
}

def orElse[EE >: E, AA >: A](b: => Either[EE, AA]): Either[EE, AA] = this match {
  case Left(_) => b
  case Right(a) => Right(a)
}

def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
  for { a <- this; b1 <- b } yield f(a, b1)
ud3sh/coursework
functional-programming-in-scala-textbook/answerkey/errorhandling/06.answer.scala
Scala
unlicense
517
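The snippet above is an answer-key fragment that assumes the book's enclosing `Either` trait. A self-contained sketch of that context plus a `map2` use; the `Left`/`Right` case-class shape follows the usual Functional Programming in Scala definition and is an assumption here:

object EitherSketch extends App {
  sealed trait Either[+E, +A] {
    def map[B](f: A => B): Either[E, B] = this match {
      case Right(a) => Right(f(a))
      case Left(e) => Left(e)
    }
    def flatMap[EE >: E, B](f: A => Either[EE, B]): Either[EE, B] = this match {
      case Left(e) => Left(e)
      case Right(a) => f(a)
    }
    def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
      for { a <- this; b1 <- b } yield f(a, b1)
  }
  case class Left[+E](value: E) extends Either[E, Nothing]
  case class Right[+A](value: A) extends Either[Nothing, A]

  // map2 combines two successes and propagates the first failure
  val sum = Right(1).map2(Right(2))(_ + _)                           // Right(3)
  val err = Right(1).map2(Left("boom"): Either[String, Int])(_ + _)  // Left(boom)
  println(sum)
  println(err)
}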
package com.kakao.cuesheet.deps

import java.io.{BufferedOutputStream, File, FileOutputStream, IOException}
import java.net.{URL, URLDecoder}
import java.nio.file.{Files, Paths}
import java.util.zip.{ZipEntry, ZipOutputStream}

import com.kakao.mango.io.FileSystems
import com.kakao.mango.logging.Logging
import com.kakao.shaded.guava.io.Files.createTempDir

sealed trait DependencyNode {
  def path: String
}

case class ManagedDependency(group: String, artifact: String, classifier: String = "jar")

case class ManagedDependencyNode(
  path: String,
  group: String,
  artifact: String,
  classifier: String,
  version: String,
  children: Seq[ManagedDependency]
) extends DependencyNode {
  def key = ManagedDependency(group, artifact, classifier)
}

case class DirectoryDependencyNode(path: String) extends DependencyNode with Logging {
  lazy val compressed: UnmanagedDependencyNode = {
    val tmpdir = createTempDir()
    val jar = new File(s"${tmpdir.getAbsolutePath}/local-${tmpdir.getName}.jar")
    val root = Paths.get(path)

    val output = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(jar)))
    var count = 0

    FileSystems.entries(root).foreach { path =>
      if (resourceExtensions.exists(path.toString.endsWith)) {
        val entry = new ZipEntry(root.relativize(path).toString)
        output.putNextEntry(entry)
        try {
          Files.copy(path, output)
          count += 1
        } catch {
          case e: IOException => logger.warn(s"skipping $path due to an IOException: ${e.getMessage}")
        }
        output.closeEntry()
      }
    }

    output.close()

    logger.debug(s"Successfully zipped $count files in $path into $jar")

    UnmanagedDependencyNode(jar.getAbsolutePath)
  }
}

case class JavaRuntimeDependencyNode(path: String) extends DependencyNode
case class UnmanagedDependencyNode(path: String) extends DependencyNode

object DependencyNode {

  val resolver = new ChainedArtifactResolver(
    new IvyPathArtifactResolver,
    new IvyOriginalPathArtifactResolver,
    new MavenPathArtifactResolver,
    new GradlePathArtifactResolver,
    new JavaRuntimeResolver,
    new MavenMetadataArtifactResolver,
    new UnmanagedJarResolver
  )

  def resolve(url: URL): DependencyNode = {
    if (url.getProtocol != "file") {
      throw new IllegalArgumentException("non-file dependency is not supported")
    }

    val path = URLDecoder.decode(url.getFile, "UTF-8")
    val file = new File(path)
    if (file.isDirectory) {
      return DirectoryDependencyNode(file.getAbsolutePath)
    }

    if (!file.isFile || !file.canRead) {
      throw new IllegalArgumentException(s"$path is not a file or readable")
    }

    DependencyNode.resolver.resolve(file.getAbsolutePath) match {
      case Some(node) => node
      case None => throw new IllegalArgumentException(s"Could not determine the dependency of $path")
    }
  }
}
kakao/cuesheet
src/main/scala/com/kakao/cuesheet/deps/DependencyNode.scala
Scala
apache-2.0
2,901
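`DependencyNode.resolve` above accepts only `file:` URLs, URL-decodes the path, and branches on directory versus readable file before consulting the resolver chain. A standalone sketch of just that normalization step, with no cuesheet classes involved:

import java.io.File
import java.net.{URL, URLDecoder}

object FileUrlSketch extends App {
  // classify a file: URL the way DependencyNode.resolve does, minus the resolver chain
  def classify(url: URL): String = {
    if (url.getProtocol != "file")
      throw new IllegalArgumentException("non-file dependency is not supported")
    val path = URLDecoder.decode(url.getFile, "UTF-8")
    val file = new File(path)
    if (file.isDirectory) s"directory: ${file.getAbsolutePath}"
    else if (file.isFile && file.canRead) s"file: ${file.getAbsolutePath}"
    else s"unreadable: $path"
  }

  println(classify(new URL("file:///tmp"))) // "directory: /tmp" on most systems
}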
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.producer

import kafka.metrics.KafkaMetricsGroup
import kafka.common.ClientIdAndTopic
import kafka.utils.{Pool, threadsafe}
import java.util.concurrent.TimeUnit

@threadsafe
class ProducerTopicMetrics(metricId: ClientIdAndTopic) extends KafkaMetricsGroup {
  val messageRate = newMeter(metricId + "MessagesPerSec", "messages", TimeUnit.SECONDS)
  val byteRate = newMeter(metricId + "BytesPerSec", "bytes", TimeUnit.SECONDS)
  val droppedMessageRate = newMeter(metricId + "DroppedMessagesPerSec", "drops", TimeUnit.SECONDS)
}

/**
 * Tracks metrics for each topic the given producer client has produced data to.
 * @param clientId The clientId of the given producer client.
 */
class ProducerTopicStats(clientId: String) {
  private val valueFactory = (k: ClientIdAndTopic) => new ProducerTopicMetrics(k)
  private val stats = new Pool[ClientIdAndTopic, ProducerTopicMetrics](Some(valueFactory))
  private val allTopicsStats = new ProducerTopicMetrics(new ClientIdAndTopic(clientId, "AllTopics")) // to differentiate from a topic named AllTopics

  def getProducerAllTopicsStats(): ProducerTopicMetrics = allTopicsStats

  def getProducerTopicStats(topic: String): ProducerTopicMetrics = {
    stats.getAndMaybePut(new ClientIdAndTopic(clientId, topic + "-"))
  }
}

/**
 * Stores the topic stats information of each producer client in a (clientId -> ProducerTopicStats) map.
 */
object ProducerTopicStatsRegistry {
  private val valueFactory = (k: String) => new ProducerTopicStats(k)
  private val globalStats = new Pool[String, ProducerTopicStats](Some(valueFactory))

  def getProducerTopicStats(clientId: String) = {
    globalStats.getAndMaybePut(clientId)
  }
}
unix1986/universe
tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/producer/ProducerTopicStats.scala
Scala
bsd-2-clause
2,485
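Both classes above lean on Kafka's `Pool` with a value factory, i.e. create-on-first-access keyed caching. A standalone sketch of the same idiom using `ConcurrentHashMap.computeIfAbsent`; the metrics type is a made-up stand-in for `ProducerTopicMetrics`:

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.LongAdder

object StatsPoolSketch extends App {
  final class TopicMetrics { val messages = new LongAdder }

  private val stats = new ConcurrentHashMap[String, TopicMetrics]()

  // create-on-first-access, like Pool.getAndMaybePut with a value factory
  def metricsFor(topic: String): TopicMetrics =
    stats.computeIfAbsent(topic, _ => new TopicMetrics)

  metricsFor("orders").messages.add(3)
  metricsFor("orders").messages.increment()
  println(metricsFor("orders").messages.sum()) // 4
}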
package com.github.ldaniels528.trifecta.messages

import com.github.ldaniels528.trifecta.TxConfig

/**
 * Provides implementing classes with the capability of displaying binary messages
 */
trait BinaryMessaging {

  /**
   * Displays the contents of the given message as a hex dump
   * @param offset the offset of the given message
   * @param message the given message
   */
  def dumpMessage(offset: Long, message: Array[Byte])(implicit config: TxConfig) {
    // determine the widths for each section: bytes & characters
    val columns = config.columns
    val byteWidth = config.columns * 3
    val charWidth = config.columns + 1

    // display the message
    var index = 0
    val length1 = Math.max(4, 1 + Math.log10(offset).toInt)
    val length2 = Math.max(3, 1 + Math.log10(message.length).toInt)
    val myFormat = s"[%0${length1}d:%0${length2}d] %-${byteWidth}s| %-${charWidth}s|"
    message.sliding(columns, columns) foreach { bytes =>
      config.out.println(myFormat.format(offset, index, asHexString(bytes), asChars(bytes)))
      index += columns
    }
  }

  /**
   * Displays the contents of the given message as a hex dump
   * @param message the given message
   */
  def dumpMessage(message: Array[Byte])(implicit config: TxConfig) {
    // determine the widths for each section: bytes & characters
    val columns = config.columns
    val byteWidth = config.columns * 3
    val charWidth = config.columns + 1

    // display the message
    var offset = 0
    val length = Math.max(3, 1 + Math.log10(message.length).toInt)
    val myFormat = s"[%0${length}d] %-${byteWidth}s| %-${charWidth}s|"
    message.sliding(columns, columns) foreach { bytes =>
      config.out.println(myFormat.format(offset, asHexString(bytes), asChars(bytes)))
      offset += columns
    }
  }

  /**
   * Converts the given hexadecimal string into a byte array
   * @param hex the given hexadecimal string (e.g. "51002b2d84aebf0342cfb659")
   * @return the byte array
   */
  protected def hexToBytes(hex: String): Array[Byte] = {
    (hex.sliding(2, 2) map (Integer.parseInt(_, 16).toByte)).toArray
  }

  /**
   * Returns the byte array as a printable ASCII character string
   * @param bytes the byte array
   * @return a character string representing the given byte array
   */
  protected def asChars(bytes: Array[Byte]): String = {
    String.valueOf(bytes map (b => if (b >= 32 && b <= 126) b.toChar else '.'))
  }

  /**
   * Returns the byte array as a hex string
   * @param bytes the byte array
   * @return a hex string representing the given byte array
   */
  protected def asHexString(bytes: Array[Byte]): String = {
    bytes map ("%02x".format(_)) mkString "."
  }
}
ldaniels528/trifecta
src/main/scala/com/github/ldaniels528/trifecta/messages/BinaryMessaging.scala
Scala
apache-2.0
2,741
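The three protected helpers in the record are pure functions, so they are easy to exercise in isolation. A standalone sketch reproducing them with a sample round-trip (formatting only, no `TxConfig` or console layout):

object HexDumpSketch extends App {
  // byte array -> dotted hex, as in asHexString
  def asHexString(bytes: Array[Byte]): String = bytes.map("%02x".format(_)).mkString(".")

  // printable ASCII passes through, everything else becomes '.', as in asChars
  def asChars(bytes: Array[Byte]): String =
    String.valueOf(bytes.map(b => if (b >= 32 && b <= 126) b.toChar else '.'))

  // hex string -> byte array, as in hexToBytes
  def hexToBytes(hex: String): Array[Byte] =
    hex.sliding(2, 2).map(Integer.parseInt(_, 16).toByte).toArray

  val bytes = "Kafka!".getBytes("US-ASCII")
  val hex = asHexString(bytes)
  println(hex)            // 4b.61.66.6b.61.21
  println(asChars(bytes)) // Kafka!
  println(hexToBytes(hex.replace(".", "")).sameElements(bytes)) // true
}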
package com.twitter.finagle.netty4.ssl.server

import com.twitter.finagle.netty4.ssl.Netty4SslConfigurations
import com.twitter.finagle.ssl.{
  ApplicationProtocols,
  Engine,
  KeyCredentials,
  SslConfigurationException
}
import com.twitter.finagle.ssl.server.{SslServerConfiguration, SslServerEngineFactory}
import com.twitter.util.security.{PrivateKeyFile, X509CertificateFile}
import io.netty.buffer.ByteBufAllocator
import io.netty.handler.ssl.{SslContext, SslContextBuilder}
import io.netty.handler.ssl.ApplicationProtocolConfig.Protocol

/**
 * Convenience functions for setting values on a Netty `SslContextBuilder`
 * which are applicable to server configurations and engines.
 */
private[finagle] object Netty4ServerSslConfigurations {

  /**
   * Configures the application protocols of the `SslContextBuilder`. This
   * method mutates the `SslContextBuilder`, and returns it as the result.
   *
   * @note This sets the application-level protocol negotiation to use both
   * NPN and ALPN.
   *
   * @note This also sets the `SelectorFailureBehavior` to NO_ADVERTISE,
   * and the `SelectedListenerFailureBehavior` to ACCEPT as those are the
   * only modes supported by both JDK and Native engines.
   */
  private def configureServerApplicationProtocols(
    builder: SslContextBuilder,
    applicationProtocols: ApplicationProtocols
  ): SslContextBuilder =
    Netty4SslConfigurations.configureApplicationProtocols(
      builder,
      applicationProtocols,
      Protocol.NPN_AND_ALPN)

  /**
   * Creates an `SslContextBuilder` for a server with the supplied `KeyCredentials`.
   *
   * @note `KeyCredentials` must be specified, using `Unspecified` is not supported.
   * @note An `SslConfigurationException` will be thrown if there is an issue loading
   * the certificate(s) or private key.
   */
  private def startServerWithKey(keyCredentials: KeyCredentials): SslContextBuilder = {
    val builder = keyCredentials match {
      case KeyCredentials.Unspecified =>
        throw SslConfigurationException.notSupported(
          "KeyCredentials.Unspecified",
          "Netty4ServerEngineFactory"
        )
      case KeyCredentials.CertAndKey(certFile, keyFile) =>
        for {
          key <- new PrivateKeyFile(keyFile).readPrivateKey()
          cert <- new X509CertificateFile(certFile).readX509Certificate()
        } yield SslContextBuilder.forServer(key, cert)
      case KeyCredentials.CertKeyAndChain(certFile, keyFile, chainFile) =>
        for {
          key <- new PrivateKeyFile(keyFile).readPrivateKey()
          cert <- new X509CertificateFile(certFile).readX509Certificate()
          chain <- new X509CertificateFile(chainFile).readX509Certificates()
        } yield SslContextBuilder.forServer(key, cert +: chain: _*)
    }
    Netty4SslConfigurations.unwrapTryContextBuilder(builder)
  }

  /**
   * Creates an `SslContext` based on the supplied `SslServerConfiguration`. This method uses
   * the `KeyCredentials`, `TrustCredentials`, and `ApplicationProtocols` from the provided
   * configuration, and forces the JDK provider if forceJdk is true.
   */
  def createServerContext(config: SslServerConfiguration, forceJdk: Boolean): SslContext = {
    val builder = startServerWithKey(config.keyCredentials)
    val withProvider = Netty4SslConfigurations.configureProvider(builder, forceJdk)
    val withTrust = Netty4SslConfigurations.configureTrust(withProvider, config.trustCredentials)
    val withAppProtocols = Netty4ServerSslConfigurations.configureServerApplicationProtocols(
      withTrust,
      config.applicationProtocols)
    withAppProtocols.build()
  }

  /**
   * Creates an `Engine` based on the supplied `SslContext` and `ByteBufAllocator`, and then
   * configures the underlying `SSLEngine` based on the supplied `SslServerConfiguration`.
   */
  def createServerEngine(
    config: SslServerConfiguration,
    context: SslContext,
    allocator: ByteBufAllocator
  ): Engine = {
    val engine = new Engine(context.newEngine(allocator))
    SslServerEngineFactory.configureEngine(engine, config)
    engine
  }
}
mkhq/finagle
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/ssl/server/Netty4ServerSslConfigurations.scala
Scala
apache-2.0
4,072
package org.jetbrains.plugins.scala.lang.resolve2

class Scala29Test extends ResolveTestBase {
  override def folderPath: String = {
    super.folderPath + "scala29/"
  }

  def testSCL2913 = doTest

  def testSCL3212 = doTest
}
triggerNZ/intellij-scala
test/org/jetbrains/plugins/scala/lang/resolve2/Scala29Test.scala
Scala
apache-2.0
229
package com.tribbloids.spookystuff.dsl

import com.tribbloids.spookystuff.SpookyEnvFixture
import com.tribbloids.spookystuff.utils.locality.PartitionIdPassthrough
import com.tribbloids.spookystuff.utils.serialization.NOTSerializable
import org.apache.spark.{HashPartitioner, TaskContext}
import org.apache.spark.rdd.RDD

import scala.util.Random

/**
 * Created by peng on 20/12/16.
 */
class GenPartitionerSuite extends SpookyEnvFixture {

  import com.tribbloids.spookystuff.utils.SpookyViews._

  it("DocCacheAware can co-partition 2 RDDs") {
    val numPartitions = Random.nextInt(80) + 9
    val gp = GenPartitioners
      .DocCacheAware(_ => new HashPartitioner(numPartitions))
      .getInstance[Int](defaultSchema)
    val beaconOpt = gp.createBeaconRDD(sc.emptyRDD[Int])

    //    val beacon = sc.makeRDD(1 to 1000, 1000).map(v => v -> v*v)
    //    val tlStrs = sc.allExecutorCoreLocationStrs
    //    val size = tlStrs.length
    val srcRDD: RDD[(Int, String)] = sc
      .parallelize(
        {
          (1 to 1000).map { v =>
            v -> v.toString
          }
        },
        numPartitions + 5
      )
      .persist()

    val ref1 = srcRDD.shufflePartitions.persist()
    ref1.count()
    val ref2 = srcRDD.shufflePartitions.persist()
    ref2.count()

    //    ref1.mapPartitions(i => Iterator(i.toList)).collect().foreach(println)
    //    ref2.mapPartitions(i => Iterator(i.toList)).collect().foreach(println)

    val zipped1 = ref1
      .map(_._2)
      .zipPartitions(ref2.map(_._2))(
        (i1, i2) => Iterator(i1.toSet == i2.toSet)
      )
      .collect()
    assert(zipped1.length > zipped1.count(identity))
    assert(zipped1.count(identity) < 2)

    val grouped1 = gp.groupByKey(ref1, beaconOpt).flatMap(_._2)
    val grouped2 = gp.groupByKey(ref2, beaconOpt).flatMap(_._2)

    val zipped2RDD = grouped1.zipPartitions(grouped2)(
      (i1, i2) => Iterator(i1.toSet == i2.toSet)
    )
    val zipped2 = zipped2RDD.collect()
    assert(zipped2.length == zipped2.count(identity))
    assert(zipped2RDD.partitions.length == numPartitions)
  }
}
tribbloid/spookystuff
core/src/test/scala/com/tribbloids/spookystuff/dsl/GenPartitionerSuite.scala
Scala
apache-2.0
2,094
package hclu.hreg.dao

import java.util.UUID

import com.typesafe.scalalogging.LazyLogging
import hclu.hreg.test.{DocTestHelpers, FlatSpecWithSql}
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.Matchers

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.language.implicitConversions

class DocDaoSpec extends FlatSpecWithSql with LazyLogging with Matchers with DocTestHelpers {
  behavior of "DocDao"

  val createdOn = new DateTime(2015, 6, 3, 13, 25, 3, DateTimeZone.UTC)

  var dao: DocDao = new DocDao(sqlDatabase)

  def generateRandomId = UUID.randomUUID()

  lazy val randomIds: List[UUID] = List.fill(3)(generateRandomId)

  override def beforeAll() {
    super.beforeAll()
    for (i <- 1 to randomIds.size) {
      dao.add(newDoc(randomIds(i - 1), "foo", "bar", createdOn), { id => Future.successful(()) }).futureValue
    }
  }

  it should "add new doc" in {
    // Given
    val id = UUID.randomUUID()

    // When
    val newRegId = dao.add(newDoc(id, "foo", "bar", createdOn), { id => Future.successful(()) }).futureValue

    // Then
    newRegId should be(10003)
    dao.findById(id).futureValue should be('defined)
  }

  it should "find doc by regId" in {
    // Given
    val ids = 10000 until 10000 + randomIds.size

    ids foreach { id =>
      dao.findByRegId(id).futureValue should be('defined)
    }
  }
}
tsechov/hclu-registry
backend/src/test/scala/hclu/hreg/dao/DocDaoSpec.scala
Scala
apache-2.0
1,418
package org.jetbrains.plugins.scala.codeInsight.intention.types

import com.intellij.codeInsight.completion.{InsertHandler, InsertionContext}
import com.intellij.codeInsight.lookup._
import com.intellij.codeInsight.template.impl.TemplateManagerImpl
import com.intellij.codeInsight.template.{Expression, ExpressionContext, Result, TextResult}
import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiDocumentManager
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil
import org.jetbrains.plugins.scala.lang.psi.TypeAdjuster
import org.jetbrains.plugins.scala.lang.psi.types.api.ScTypeText
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil

/**
 * Author: Svyatoslav Ilinskiy
 * Date: 22.12.15.
 */
abstract class ChooseValueExpression[T](lookupItems: Seq[T], defaultItem: T) extends Expression {
  def lookupString(elem: T): String

  def result(element: T): String

  val lookupElements: Array[LookupElement] = calcLookupElements().toArray

  def calcLookupElements(): Seq[LookupElementBuilder] = lookupItems.map { elem =>
    LookupElementBuilder.create(elem, lookupString(elem)).withInsertHandler(new InsertHandler[LookupElement] {
      override def handleInsert(context: InsertionContext, item: LookupElement): Unit = {
        val topLevelEditor = InjectedLanguageUtil.getTopLevelEditor(context.getEditor)
        val templateState = TemplateManagerImpl.getTemplateState(topLevelEditor)
        if (templateState != null) {
          val range = templateState.getCurrentVariableRange
          if (range != null) {
            //need to insert with FQNs
            val newText = result(item.getObject.asInstanceOf[T])
            val document = topLevelEditor.getDocument
            val startOffset = range.getStartOffset
            document.replaceString(startOffset, range.getEndOffset, newText)

            val file = context.getFile
            PsiDocumentManager.getInstance(file.getProject).commitDocument(document)
            val newRange = TextRange.create(startOffset, startOffset + newText.length)
            val elem = ScalaRefactoringUtil.commonParent(file, newRange)
            TypeAdjuster.markToAdjust(elem)
          }
        }
      }
    })
  }

  override def calculateResult(context: ExpressionContext): Result = new TextResult(lookupString(defaultItem))

  override def calculateLookupItems(context: ExpressionContext): Array[LookupElement] =
    if (lookupElements.length > 1) lookupElements else null

  override def calculateQuickResult(context: ExpressionContext): Result = calculateResult(context)
}

class ChooseTypeTextExpression(lookupItems: Seq[ScTypeText], default: ScTypeText)
  extends ChooseValueExpression[ScTypeText](lookupItems, default) {

  def this(lookupItems: Seq[ScTypeText]) {
    this(lookupItems, lookupItems.head)
  }

  override def lookupString(elem: ScTypeText): String = {
    val useCanonicalText: Boolean = lookupItems.count(_.presentableText == elem.presentableText) > 1
    if (useCanonicalText) elem.canonicalText.replace("_root_.", "")
    else elem.presentableText
  }

  override def calcLookupElements(): Seq[LookupElementBuilder] = {
    super.calcLookupElements().map { le =>
      val text = le.getObject.asInstanceOf[ScTypeText]
      //if we use canonical text we still want to be able to search by presentable text
      le.withLookupString(text.presentableText)
    }
  }

  override def result(element: ScTypeText): String = element.canonicalText
}
ilinum/intellij-scala
src/org/jetbrains/plugins/scala/codeInsight/intention/types/ChooseValueExpression.scala
Scala
apache-2.0
3,505
package models.audit import com.vividsolutions.jts.geom.{Coordinate, LineString} import java.sql.Timestamp import java.util.{UUID, Calendar, Date} import models.street.{StreetEdgeAssignmentCountTable, StreetEdge, StreetEdgeTable} import models.user.User import models.utils.MyPostgresDriver import models.utils.MyPostgresDriver.simple._ import models.daos.slick.DBTableDefinitions.{UserTable, DBUser} import play.api.libs.json._ import play.api.Play.current import play.extras.geojson import scala.slick.lifted.ForeignKeyQuery import scala.slick.jdbc.{StaticQuery => Q, GetResult} import scala.util.Random case class AuditTask(auditTaskId: Int, amtAssignmentId: Option[Int], userId: String, streetEdgeId: Int, taskStart: Timestamp, taskEnd: Option[Timestamp]) case class NewTask(edgeId: Int, geom: LineString, x1: Float, y1: Float, x2: Float, y2: Float, taskStart: Timestamp, completed: Boolean) { def toJSON: JsObject = { val coordinates: Array[Coordinate] = geom.getCoordinates val latlngs: List[geojson.LatLng] = coordinates.map(coord => geojson.LatLng(coord.y, coord.x)).toList val linestring: geojson.LineString[geojson.LatLng] = geojson.LineString(latlngs) val properties = Json.obj( "street_edge_id" -> edgeId, "x1" -> x1, "y1" -> y1, "x2" -> x2, "y2" -> y2, "task_start" -> taskStart.toString, "completed" -> completed ) val feature = Json.obj("type" -> "Feature", "geometry" -> linestring, "properties" -> properties) Json.obj("type" -> "FeatureCollection", "features" -> List(feature)) } } /** * */ class AuditTaskTable(tag: Tag) extends Table[AuditTask](tag, Some("sidewalk"), "audit_task") { def auditTaskId = column[Int]("audit_task_id", O.PrimaryKey, O.AutoInc) def amtAssignmentId = column[Option[Int]]("amt_assignment_id", O.Nullable) def userId = column[String]("user_id", O.NotNull) def streetEdgeId = column[Int]("street_edge_id", O.NotNull) def taskStart = column[Timestamp]("task_start", O.NotNull) def taskEnd = column[Option[Timestamp]]("task_end", O.Nullable) def * = (auditTaskId, amtAssignmentId, userId, streetEdgeId, taskStart, taskEnd) <> ((AuditTask.apply _).tupled, AuditTask.unapply) def streetEdge: ForeignKeyQuery[StreetEdgeTable, StreetEdge] = foreignKey("audit_task_street_edge_id_fkey", streetEdgeId, TableQuery[StreetEdgeTable])(_.streetEdgeId) def user: ForeignKeyQuery[UserTable, DBUser] = foreignKey("audit_task_user_id_fkey", userId, TableQuery[UserTable])(_.userId) } /** * Data access object for the audit_task table */ object AuditTaskTable { import MyPostgresDriver.plainImplicits._ implicit val auditTaskConverter = GetResult[AuditTask](r => { AuditTask(r.nextInt, r.nextIntOption, r.nextString, r.nextInt, r.nextTimestamp, r.nextTimestampOption) }) // case class NewTask(edgeId: Int, geom: LineString, x1: Float, y1: Float, x2: Float, y2: Float, taskStart: Timestamp, completed: Boolean) implicit val newTaskConverter = GetResult[NewTask](r => { val edgeId = r.nextInt val geom = r.nextGeometry[LineString] val x1 = r.nextFloat val y1 = r.nextFloat val x2 = r.nextFloat val y2 = r.nextFloat val taskStart = r.nextTimestamp val completed = r.nextIntOption.isDefined NewTask(edgeId, geom, x1, y1, x2, y2, taskStart, completed) }) val db = play.api.db.slick.DB val assignmentCount = TableQuery[StreetEdgeAssignmentCountTable] val auditTasks = TableQuery[AuditTaskTable] val streetEdges = TableQuery[StreetEdgeTable] val users = TableQuery[UserTable] case class AuditCountPerDay(date: String, count: Int) /** * This method returns all the tasks * * @return */ def all: List[AuditTask] = db.withSession { 
implicit session => auditTasks.list } /** * This method returns the size of the entire table * * @return */ def size: Int = db.withSession { implicit session => auditTasks.list.size } /** * Get the last audit task that the user conducted * * @param userId User id * @return */ def lastAuditTask(userId: UUID): Option[AuditTask] = db.withSession { implicit session => auditTasks.filter(_.userId === userId.toString).list.lastOption } /** * Return audited street edges * * @return */ def auditedStreets: List[StreetEdge] = db.withSession { implicit session => val _streetEdges = (for { (_auditTasks, _streetEdges) <- auditTasks.innerJoin(streetEdges).on(_.streetEdgeId === _.streetEdgeId) } yield _streetEdges).filter(edge => edge.deleted === false) _streetEdges.list.groupBy(_.streetEdgeId).map(_._2.head).toList // Filter out the duplicated street edge } /** * Return street edges audited by the given user * * @param userId User Id * @return */ def auditedStreets(userId: UUID): List[StreetEdge] = db.withSession { implicit session => val _streetEdges = (for { (_auditTasks, _streetEdges) <- auditTasks.innerJoin(streetEdges).on(_.streetEdgeId === _.streetEdgeId) if _auditTasks.userId === userId.toString } yield _streetEdges).filter(edge => edge.deleted === false) _streetEdges.list } def auditCounts: List[AuditCountPerDay] = db.withSession { implicit session => val selectAuditCountQuery = Q.queryNA[(String, Int)]( """SELECT calendar_date::date, COUNT(audit_task_id) FROM (SELECT current_date - (n || ' day')::INTERVAL AS calendar_date |FROM generate_series(0, 30) n) AS calendar |LEFT JOIN sidewalk.audit_task |ON audit_task.task_start::date = calendar_date::date |GROUP BY calendar_date |ORDER BY calendar_date""".stripMargin ) selectAuditCountQuery.list.map(x => AuditCountPerDay.tupled(x)) } /** * Return audit counts for the last 31 days. * * @param userId User id */ def auditCounts(userId: UUID): List[AuditCountPerDay] = db.withSession { implicit session => val selectAuditCountQuery = Q.query[String, (String, Int)]( """SELECT calendar_date::date, COUNT(audit_task_id) FROM (SELECT current_date - (n || ' day')::INTERVAL AS calendar_date |FROM generate_series(0, 30) n) AS calendar |LEFT JOIN sidewalk.audit_task |ON audit_task.task_start::date = calendar_date::date |AND audit_task.user_id = ? |GROUP BY calendar_date |ORDER BY calendar_date""".stripMargin ) selectAuditCountQuery(userId.toString).list.map(x => AuditCountPerDay.tupled(x)) } /** * get a new task for the user * * Reference for creating java.sql.timestamp * http://stackoverflow.com/questions/308683/how-can-i-get-the-current-date-and-time-in-utc-or-gmt-in-java * http://alvinalexander.com/java/java-timestamp-example-current-time-now * * Subqueries in Slick * http://stackoverflow.com/questions/14425844/why-does-slick-generate-a-subquery-when-take-method-is-called * http://stackoverflow.com/questions/14920153/how-to-write-nested-queries-in-select-clause * * @param username User name. Todo. Change it to user id * @return */ def getNewTask(username: String): NewTask = db.withSession { implicit session => val calendar: Calendar = Calendar.getInstance val now: Date = calendar.getTime val currentTimestamp: Timestamp = new Timestamp(now.getTime) val completedTasks = for { u <- users.filter(_.username === username) at <- auditTasks if at.userId === u.userId } yield (u.username.?, at.streetEdgeId.?) 
val edges = (for { (e, c) <- streetEdges.leftJoin(completedTasks).on(_.streetEdgeId === _._2) if c._1.isEmpty } yield e).filter(edge => edge.deleted === false).take(100).list // Increment the assignment count and return the task val e: StreetEdge = Random.shuffle(edges).head StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId) NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false) } /** * Get task without username * * @return */ def getNewTask: NewTask = db.withSession { implicit session => val calendar: Calendar = Calendar.getInstance val now: Date = calendar.getTime val currentTimestamp: Timestamp = new Timestamp(now.getTime) val edges = (for { (_streetEdges, _asgCount) <- streetEdges.innerJoin(assignmentCount) .on(_.streetEdgeId === _.streetEdgeId).sortBy(_._2.completionCount) } yield _streetEdges).filter(edge => edge.deleted === false).take(100).list assert(edges.nonEmpty) val e: StreetEdge = Random.shuffle(edges).head StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId) NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false) } /** * Get a new task specified by the street edge id. * * @param streetEdgeId Street edge id * @return */ def getNewTask(streetEdgeId: Int): NewTask = db.withSession { implicit session => val calendar: Calendar = Calendar.getInstance val now: Date = calendar.getTime val currentTimestamp: Timestamp = new Timestamp(now.getTime) val edges = (for { (_streetEdges, _asgCount) <- streetEdges.innerJoin(assignmentCount) .on(_.streetEdgeId === _.streetEdgeId).sortBy(_._2.completionCount) } yield _streetEdges).filter(edge => edge.deleted === false && edge.streetEdgeId === streetEdgeId).list assert(edges.nonEmpty) val e: StreetEdge = edges.head StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId) NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false) } /** * Get a task that is connected to the end point of the current task (street edge) * * @param streetEdgeId Street edge id */ def getConnectedTask(streetEdgeId: Int, lat: Float, lng: Float): NewTask = db.withSession { implicit session => import models.street.StreetEdgeTable.streetEdgeConverter // For plain query val calendar: Calendar = Calendar.getInstance val now: Date = calendar.getTime val currentTimestamp: Timestamp = new Timestamp(now.getTime) // Todo: I don't think this query takes into account if the auditor has looked at the area or not. val selectEdgeQuery = Q.query[(Float, Float, Int), StreetEdge]( """SELECT st_e.street_edge_id, st_e.geom, st_e.source, st_e.target, st_e.x1, st_e.y1, st_e.x2, st_e.y2, st_e.way_type, st_e.deleted, st_e.timestamp | FROM sidewalk.street_edge_street_node AS st_e_st_n | INNER JOIN (SELECT st_n.street_node_id FROM sidewalk.street_node AS st_n | ORDER BY st_n.geom <-> st_setsrid(st_makepoint(?, ?), 4326) | LIMIT 1) AS st_n_view | ON st_e_st_n.street_node_id = st_n_view.street_node_id | INNER JOIN sidewalk.street_edge AS st_e | ON st_e_st_n.street_edge_id = st_e.street_edge_id | INNER JOIN sidewalk.street_edge_assignment_count AS st_e_asg | ON st_e.street_edge_id = st_e_asg.street_edge_id | WHERE NOT st_e_st_n.street_edge_id = ? 
| ORDER BY st_e_asg.completion_count ASC""".stripMargin
    )
    val edges: List[StreetEdge] = selectEdgeQuery((lng, lat, streetEdgeId)).list

    edges match {
      case edges if edges.nonEmpty =>
        val e = edges.head
        StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId)
        NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false)
      case _ => getNewTask // The list is empty, for whatever reason
    }
  }

  /**
   * Get a task that is in a given region
   *
   * @param regionId region id
   * @return
   */
  def getNewTaskInRegion(regionId: Int): NewTask = db.withSession { implicit session =>
    import models.street.StreetEdgeTable.streetEdgeConverter
    val calendar: Calendar = Calendar.getInstance
    val now: Date = calendar.getTime
    val currentTimestamp: Timestamp = new Timestamp(now.getTime)

    val selectEdgeQuery = Q.query[Int, StreetEdge](
      """SELECT st_e.street_edge_id, st_e.geom, st_e.source, st_e.target, st_e.x1, st_e.y1, st_e.x2, st_e.y2, st_e.way_type, st_e.deleted, st_e.timestamp FROM region
        |INNER JOIN street_edge AS st_e
        |ON ST_Intersects(st_e.geom, region.geom)
        |WHERE st_e.deleted = FALSE AND region.region_id = ?""".stripMargin
    )

    val edges: List[StreetEdge] = selectEdgeQuery(regionId).list
    edges match {
      case edges if edges.nonEmpty =>
        // Increment the assignment count and return the task
        val e: StreetEdge = Random.shuffle(edges).head
        StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId)
        NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false)
      case _ => getNewTask // The list is empty, for whatever reason
    }
  }

  /**
   * Get a task that is in a given region
   *
   * @param regionId region id
   * @param user User object. Todo. Change this to user id.
   * @return
   */
  def getNewTaskInRegion(regionId: Int, user: User) = db.withSession { implicit session =>
    import models.street.StreetEdgeTable.streetEdgeConverter
    val calendar: Calendar = Calendar.getInstance
    val now: Date = calendar.getTime
    val currentTimestamp: Timestamp = new Timestamp(now.getTime)
    val userId: String = user.userId.toString

    val selectEdgeQuery = Q.query[(String, Int), StreetEdge](
      """SELECT st_e.street_edge_id, st_e.geom, st_e.source, st_e.target, st_e.x1, st_e.y1, st_e.x2, st_e.y2, st_e.way_type, st_e.deleted, st_e.timestamp FROM sidewalk.region
        | INNER JOIN sidewalk.street_edge AS st_e
        | ON ST_Intersects(st_e.geom, region.geom)
        | LEFT JOIN sidewalk.audit_task
        | ON st_e.street_edge_id = audit_task.street_edge_id AND audit_task.user_id = ?
        | WHERE st_e.deleted = FALSE AND region.region_id = ? AND audit_task.audit_task_id ISNULL""".stripMargin
    )

    val edges: List[StreetEdge] = selectEdgeQuery((userId, regionId)).list
    edges match {
      case edges if edges.nonEmpty =>
        // Increment the assignment count and return the task
        val e: StreetEdge = Random.shuffle(edges).head
        StreetEdgeAssignmentCountTable.incrementAssignment(e.streetEdgeId)
        NewTask(e.streetEdgeId, e.geom, e.x1, e.y1, e.x2, e.y2, currentTimestamp, completed=false)
      case _ => getNewTask // The list is empty, for whatever reason. Probably the user has audited all the streets in the region.
    }
  }

  /**
   * Get tasks in the region
   *
   * @param regionId Region id
   * @return
   */
  def getTasksInRegion(regionId: Int): List[NewTask] = db.withSession { implicit session =>
    val selectTaskQuery = Q.query[Int, NewTask](
      """SELECT st_e.street_edge_id, st_e.geom, st_e.x1, st_e.y1, st_e.x2, st_e.y2, st_e.timestamp, NULL as audit_task_id
        |FROM sidewalk.region
        |INNER JOIN sidewalk.street_edge AS st_e
        |ON st_e.geom && region.geom
        |WHERE region.region_id = ? AND st_e.deleted IS FALSE""".stripMargin
    )
    selectTaskQuery(regionId).list
  }

  /**
   * Get tasks in the region
   *
   * @param regionId Region id
   * @param userId User id
   * @return
   */
  def getTasksInRegion(regionId: Int, userId: UUID): List[NewTask] = db.withSession { implicit session =>
    val calendar: Calendar = Calendar.getInstance
    val now: Date = calendar.getTime
    val currentTimestamp: Timestamp = new Timestamp(now.getTime)

    val selectTaskQuery = Q.query[(String, Int), NewTask](
      """SELECT st_e.street_edge_id, st_e.geom, st_e.x1, st_e.y1, st_e.x2, st_e.y2, st_e.timestamp, completed_audit.audit_task_id
        |FROM sidewalk.region
        |INNER JOIN sidewalk.street_edge AS st_e
        |ON st_e.geom && region.geom
        |LEFT JOIN (
        |    SELECT street_edge_id, audit_task_id FROM sidewalk.audit_task
        |    WHERE user_id = ?
        |) AS completed_audit
        |ON st_e.street_edge_id = completed_audit.street_edge_id
        |WHERE region.region_id = ? AND st_e.deleted IS FALSE""".stripMargin
    )
    selectTaskQuery((userId.toString, regionId)).list
  }

  /**
   * Verify if there are tasks available for the user in the given region
   *
   * @param userId user id
   */
  def isTaskAvailable(userId: UUID, regionId: Int): Boolean = db.withSession { implicit session =>
    val selectAvailableTaskQuery = Q.query[(Int, String), AuditTask](
      """SELECT audit_task.* FROM sidewalk.user_current_region
        |INNER JOIN sidewalk.region
        |ON user_current_region.region_id = ?
        |INNER JOIN sidewalk.street_edge
        |ON region.geom && street_edge.geom
        |LEFT JOIN sidewalk.audit_task
        |ON street_edge.street_edge_id = audit_task.street_edge_id
        |WHERE user_current_region.user_id = ?
        |AND audit_task.audit_task_id IS NULL
      """.stripMargin
    )
    val availableTasks = selectAvailableTaskQuery((regionId, userId.toString)).list
    availableTasks.nonEmpty
  }

  /**
   * Saves a new audit task.
   *
   * Reference for returning the last inserted item's id
   * http://stackoverflow.com/questions/21894377/returning-autoinc-id-after-insert-in-slick-2-0
   *
   * @param completedTask completed task
   * @return
   */
  def save(completedTask: AuditTask): Int = db.withTransaction { implicit session =>
    val auditTaskId: Int =
      (auditTasks returning auditTasks.map(_.auditTaskId)) += completedTask
    auditTaskId
  }
}
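// --- Illustrative usage sketch (not part of the upstream file) ---
// Assumes the enclosing object is `AuditTaskTable`, as the file name suggests; the
// helper below is hypothetical and only calls methods defined above. It shows the
// intended selection flow: region-scoped and user-aware when a user is signed in,
// otherwise the global, completion-count-ordered task.
object AuditTaskUsageSketch {
  def nextTask(userOpt: Option[User], regionId: Int): NewTask = userOpt match {
    case Some(user) => AuditTaskTable.getNewTaskInRegion(regionId, user) // skips already-audited edges
    case None       => AuditTaskTable.getNewTask                         // least-audited edges first
  }
}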
danZzyy/SidewalkWebpage
sidewalk-webpage/app/models/audit/AuditTaskTable.scala
Scala
mit
17,524
/* * Copyright 2011 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb package mapper import java.util.Locale import common._ import json._ import util._ import Helpers._ /* * This file contains a number of objects that are common to several * of the Mapper specs. By placing them here we reduce code duplication * and get rid of some timing errors found when we moved to SBT for build. * * Derek Chen-Becker, Mar 8, 2011 */ object MapperSpecsModel { // These rules are common to all Mapper specs def snakify(connid: ConnectionIdentifier, name: String): String = { if (connid.jndiName == "snake") { StringHelpers.snakify(name) } else { name.toLowerCase } } MapperRules.columnName = snakify MapperRules.tableName = snakify // Simple name calculator def displayNameCalculator(bm: BaseMapper, l: Locale, name: String) = { val mapperName = bm.dbName val displayName = name match { case "firstName" if l == Locale.getDefault() => "DEFAULT:" + mapperName + "." + name case "firstName" if l == new Locale("xx", "YY") => "xx_YY:" + mapperName + "." + name case _ => name } displayName } MapperRules.displayNameCalculator.default.set(displayNameCalculator _) def setup() { // For now, do nothing. Just force this object to load } def doLog = false private def ignoreLogger(f: => AnyRef): Unit = () def cleanup() { // Snake connection doesn't create FK constraints (put this here to be absolutely sure it gets set before Schemify) MapperRules.createForeignKeys_? 
= c => { c.jndiName != "snake" } Schemifier.destroyTables_!!(DefaultConnectionIdentifier, if (doLog) Schemifier.infoF _ else ignoreLogger _, SampleTag, SampleModel, Dog, Mixer, Dog2, User, TstItem, Thing) Schemifier.destroyTables_!!(DbProviders.SnakeConnectionIdentifier, if (doLog) Schemifier.infoF _ else ignoreLogger _, SampleTagSnake, SampleModelSnake) Schemifier.schemify(true, if (doLog) Schemifier.infoF _ else ignoreLogger _, DefaultConnectionIdentifier, SampleModel, SampleTag, User, Dog, Mixer, Dog2, TstItem, Thing) Schemifier.schemify(true, if (doLog) Schemifier.infoF _ else ignoreLogger _, DbProviders.SnakeConnectionIdentifier, SampleModelSnake, SampleTagSnake) } } object SampleTag extends SampleTag with LongKeyedMetaMapper[SampleTag] { override def dbAddTable = Full(populate _) private def populate { val samp = SampleModel.findAll() val tags = List("Hello", "Moose", "Frog", "WooHoo", "Sloth", "Meow", "Moof") for (t <- tags; m <- samp) SampleTag.create.tag(t).model(m).save } } class SampleTag extends LongKeyedMapper[SampleTag] with IdPK { def getSingleton = SampleTag // what's the "meta" server object tag extends MappedString(this, 32) object model extends MappedLongForeignKey(this, SampleModel) object extraColumn extends MappedString(this, 32) { override def dbColumnName = "AnExtraColumn" } } object SampleStatus extends Enumeration { val Active, Disabled, Hiatus = Value } object SampleModel extends SampleModel with KeyedMetaMapper[Long, SampleModel] { override def dbAddTable = Full(populate _) def encodeAsJson(in: SampleModel): JsonAST.JObject = encodeAsJSON_!(in) def buildFromJson(json: JsonAST.JObject): SampleModel = decodeFromJSON_!(json, false) private def populate { create.firstName("Elwood").save create.firstName("Madeline").save create.firstName("Archer").status(SampleStatus.Disabled).save create.firstName("NotNull").moose(Full(99L)).save } } class SampleModel extends KeyedMapper[Long, SampleModel] { def getSingleton = SampleModel // what's the "meta" server def primaryKeyField: MappedLongIndex[SampleModel] = id object id extends MappedLongIndex(this) object firstName extends MappedString(this, 32) object moose extends MappedNullableLong(this) object notNull extends MappedString(this, 32) { override def dbNotNull_? 
= true } object status extends MappedEnum(this, SampleStatus) def encodeAsJson(): JsonAST.JObject = SampleModel.encodeAsJson(this) } object SampleTagSnake extends SampleTagSnake with LongKeyedMetaMapper[SampleTagSnake] { override def dbAddTable = Full(populate _) private def populate { val samp = SampleModelSnake.findAll() val tags = List("Hello", "Moose", "Frog", "WooHoo", "Sloth", "Meow", "Moof") for (t <- tags; m <- samp) SampleTagSnake.create.tag(t).model(m).save } override def dbDefaultConnectionIdentifier = DbProviders.SnakeConnectionIdentifier } class SampleTagSnake extends LongKeyedMapper[SampleTagSnake] with IdPK { def getSingleton = SampleTagSnake // what's the "meta" server object tag extends MappedString(this, 32) object model extends MappedLongForeignKey(this, SampleModelSnake) object extraColumn extends MappedString(this, 32) { override def dbColumnName = "AnExtraColumn" } } object SampleModelSnake extends SampleModelSnake with KeyedMetaMapper[Long, SampleModelSnake] { override def dbAddTable = Full(populate _) def encodeAsJson(in: SampleModelSnake): JsonAST.JObject = encodeAsJSON_!(in) def buildFromJson(json: JsonAST.JObject): SampleModelSnake = decodeFromJSON_!(json, false) private def populate { create.firstName("Elwood").save create.firstName("Madeline").save create.firstName("Archer").save create.firstName("NotNull").moose(Full(99L)).save } override def dbDefaultConnectionIdentifier = DbProviders.SnakeConnectionIdentifier } class SampleModelSnake extends KeyedMapper[Long, SampleModelSnake] { def getSingleton = SampleModelSnake // what's the "meta" server def primaryKeyField = id object id extends MappedLongIndex(this) object firstName extends MappedString(this, 32) object moose extends MappedNullableLong(this) object notNull extends MappedString(this, 32) { override def dbNotNull_? 
= true } def encodeAsJson(): JsonAST.JObject = SampleModelSnake.encodeAsJson(this) } /** * The singleton that has methods for accessing the database */ object User extends User with MetaMegaProtoUser[User] { override def dbAddTable = Full(populate _) private def populate { create.firstName("Elwood").save create.firstName("Madeline").save create.firstName("Archer").save } override def dbTableName = "users" // define the DB table name override def screenWrap = Full(<lift:surround with="default" at="content"><lift:bind/></lift:surround>) // define the order fields will appear in forms and output override def fieldOrder = List(id, firstName, lastName, email, locale, timezone, password, textArea) // comment this line out to require email validations override def skipEmailValidation = true } /** * An O-R mapped "User" class that includes first name, last name, password and we add a "Personal Essay" to it */ class User extends MegaProtoUser[User] { def getSingleton = User // what's the "meta" server // define an additional field for a personal essay object textArea extends MappedTextarea(this, 2048) { override def textareaRows = 10 override def textareaCols = 50 override def displayName = "Personal Essay" } } class Dog extends LongKeyedMapper[Dog] with IdPK { def getSingleton = Dog object name extends MappedPoliteString(this, 128) object weight extends MappedInt(this) object owner extends MappedLongForeignKey(this, User) object price extends MappedDecimal(this, new java.math.MathContext(7), 2) } object Dog extends Dog with LongKeyedMetaMapper[Dog] { override def dbAddTable = Full(populate _) private def populate { create.name("Elwood").save create.name("Madeline").save create.name("Archer").save create.name("fido").owner(User.find(By(User.firstName, "Elwood"))).save } def who(in: Dog): Box[User] = in.owner } class Mixer extends LongKeyedMapper[Mixer] with IdPK { def getSingleton = Mixer object name extends MappedPoliteString(this, 128) { override def dbColumnName = "NaM_E" override def defaultValue = "wrong" } object weight extends MappedInt(this) { override def dbColumnName = "WEIGHT" override def defaultValue = -99 } } object Mixer extends Mixer with LongKeyedMetaMapper[Mixer] { override def dbAddTable = Full(populate _) override def dbTableName = "MIXME_UP" private def populate { create.name("Elwood").weight(33).save create.name("Madeline").weight(44).save create.name("Archer").weight(105).save } } object Thing extends Thing with KeyedMetaMapper[String, Thing] { override def dbTableName = "things" import java.util.UUID override def beforeCreate = List((thing: Thing) => { thing.thing_id(UUID.randomUUID().toString()) }) } class Thing extends KeyedMapper[String, Thing] { def getSingleton = Thing def primaryKeyField = thing_id object thing_id extends MappedStringIndex(this, 36) { override def writePermission_? = true override def dbAutogenerated_? = false override def dbNotNull_? = true } object name extends MappedString(this, 64) } /** * Test class to see if you can have a non-autogenerated primary key * Issue 552 */ class TstItem extends LongKeyedMapper[TstItem] { def getSingleton = TstItem def primaryKeyField = tmdbId object tmdbId extends MappedLongIndex(this) { override def writePermission_? = true override def dbAutogenerated_? 
= false } object name extends MappedText(this) } object TstItem extends TstItem with LongKeyedMetaMapper[TstItem] class Dog2 extends LongKeyedMapper[Dog2] with CreatedUpdated { def getSingleton = Dog2 override def primaryKeyField = dog2id object dog2id extends MappedLongIndex[Dog2](this.asInstanceOf[MapperType]) { override def dbColumnName = "DOG2_Id" } object name extends MappedPoliteString(this, 128) object weight extends MappedInt(this) object owner extends MappedLongForeignKey(this, User) object actualAge extends MappedInt(this) { override def dbColumnName = "ACTUAL_AGE" override def defaultValue = 1 override def dbIndexed_? = true } object isDog extends MappedBoolean(this) { override def dbColumnName = "is_a_dog" override def defaultValue = false override def dbIndexed_? = true } object createdTime extends MappedDateTime(this) { override def dbColumnName = "CreatedTime" override def defaultValue = new _root_.java.util.Date() override def dbIndexed_? = true } } object Dog2 extends Dog2 with LongKeyedMetaMapper[Dog2] { override def dbTableName = "DOG2" override def dbAddTable = Full(populate _) private def populate { create.name("Elwood").actualAge(66).save create.name("Madeline").save create.name("Archer").save create.name("fido").owner(User.find(By(User.firstName, "Elwood"))).isDog(true).save create.name("toto").owner(User.find(By(User.firstName, "Archer"))).actualAge(3).isDog(true) .createdTime(Dog2.getRefDate).save } // Get new instance of fixed point-in-time reference date def getRefDate: _root_.java.util.Date = { new _root_.java.util.Date(1257089309453L) } }
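// --- Illustrative sketch (not part of the upstream spec model) ---
// Shows what the naming rules installed at the top of MapperSpecsModel produce. The
// values in the comments assume Lift's StringHelpers.snakify camelCase -> snake_case
// behaviour; the object name is made up for illustration.
object NamingRuleSketch {
  // Under the connection whose JNDI name is "snake": camelCase becomes snake_case.
  val snakeColumn: String = StringHelpers.snakify("firstName") // "first_name"
  // Under any other connection the rule simply lower-cases the name.
  val defaultColumn: String = "firstName".toLowerCase          // "firstname"
}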
lzpfmh/framework-2
persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpecsModel.scala
Scala
apache-2.0
11,828
/*
 * Copyright 2019 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.play.views.helpers

import play.twirl.api.Html

trait FieldType

case class InputType(inputType: String,
                     key: String,
                     value: String,
                     divClass: Option[String] = None,
                     labelClass: Option[String] = None,
                     inputClass: Option[String] = None,
                     dataAttribute: Option[String] = None,
                     label: Option[String] = None) extends FieldType

object RadioButton {
  def apply(key: String, value: String, divClass: Option[String] = None, labelClass: Option[String] = None,
            inputClass: Option[String] = None, dataAttribute: Option[String] = None) = {
    InputType("radio", key, value, divClass, labelClass, inputClass, dataAttribute)
  }
}

object InputText {
  def apply(fieldLabel: String, divClass: Option[String] = None, labelClass: Option[String] = None,
            inputClass: Option[String] = None, label: Option[String] = None) = {
    // `label` must be passed by name: positionally it would land in the `dataAttribute` slot.
    InputType("text", "", fieldLabel, divClass, labelClass, inputClass, label = label)
  }
}

case class Select(values: Seq[(String, String)],
                  emptyValueText: Option[String],
                  label: String,
                  labelClass: Option[String] = None,
                  groupClass: Option[String] = None,
                  selectClass: Option[String] = None,
                  additionalTitleText: Option[String] = None) extends FieldType

case class DateControl(yearRange: Range, extraClass: Option[String] = None) extends FieldType

case class FormField(field: play.api.data.Field, inputs: Seq[FieldType], explanationText: Option[Html] = None)
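// --- Illustrative sketch (not part of the upstream file) ---
// How these helpers might be instantiated. Because `InputType` mixes several
// Option[String] parameters of the same type, passing them by name avoids the kind of
// positional mix-up fixed in `InputText.apply` above. All values are made up.
object FieldTypeSketch {
  val yes: InputType  = RadioButton("answer", "yes", labelClass = Some("bold"))
  val name: InputType = InputText("Full name", inputClass = Some("form-control"), label = Some("name"))
  val country: Select = Select(
    values         = Seq("gb" -> "United Kingdom", "fr" -> "France"),
    emptyValueText = Some("-- select --"),
    label          = "Country"
  )
}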
nicf82/play-ui
src/main/twirl/uk/gov/hmrc/play/views/helpers/model.scala
Scala
apache-2.0
1,990
package com.karasiq.shadowcloud.actors.internal import scala.collection.mutable import scala.concurrent.Future import akka.event.Logging import akka.pattern.ask import com.karasiq.shadowcloud.ShadowCloudExtension import com.karasiq.shadowcloud.actors.{RegionIndex, StorageIndex} import com.karasiq.shadowcloud.actors.events.RegionEvents import com.karasiq.shadowcloud.actors.RegionIndex.WriteDiff import com.karasiq.shadowcloud.index.{ChunkIndex, FolderIndex} import com.karasiq.shadowcloud.index.diffs.IndexDiff import com.karasiq.shadowcloud.model._ import com.karasiq.shadowcloud.model.utils.{FileAvailability, IndexScope, SyncReport} import com.karasiq.shadowcloud.storage.replication.StorageSelector import com.karasiq.shadowcloud.storage.replication.RegionStorageProvider.RegionStorage import com.karasiq.shadowcloud.storage.utils.IndexMerger import com.karasiq.shadowcloud.storage.utils.IndexMerger.RegionKey import com.karasiq.shadowcloud.utils.Utils private[actors] object RegionIndexTracker { def apply(regionId: RegionId, chunksTracker: ChunksTracker) (implicit sc: ShadowCloudExtension): RegionIndexTracker = { new RegionIndexTracker(regionId, chunksTracker) } } private[actors] final class RegionIndexTracker(regionId: RegionId, chunksTracker: ChunksTracker) (implicit sc: ShadowCloudExtension) { import sc.implicits.{defaultTimeout, executionContext} // ----------------------------------------------------------------------- // Context // ----------------------------------------------------------------------- private[this] val log = Logging(sc.implicits.actorSystem, s"$regionId-index") val globalIndex = IndexMerger.region() // ----------------------------------------------------------------------- // Storages // ----------------------------------------------------------------------- object storages { object state { def extractDiffs(storageId: StorageId): Seq[(SequenceNr, IndexDiff)] = { val diffs = for ((RegionKey(_, `storageId`, sequenceNr), diff) ← globalIndex.diffs) yield (sequenceNr, diff) diffs.toVector } def extractIndex(storageId: StorageId): IndexMerger[SequenceNr] = { IndexMerger.restore(IndexMerger.State(extractDiffs(storageId))) } def addStorageDiffs(storageId: StorageId, diffs: Seq[(SequenceNr, IndexDiff)]): Unit = { // dropStorageDiffs(storageId, diffs.map(_._1).toSet) diffs.foreach { case (sequenceNr, diff) ⇒ val regionKey = RegionKey(diff.time, storageId, sequenceNr) globalIndex.add(regionKey, diff) chunksTracker.storages.state.registerDiff(storageId, diff.chunks) log.debug("Virtual region [{}] index updated: {} -> {}", regionId, regionKey, diff) sc.eventStreams.publishRegionEvent(regionId, RegionEvents.IndexUpdated(regionKey, diff)) } } def addStorageDiff(storageId: StorageId, sequenceNr: SequenceNr, diff: IndexDiff) = { addStorageDiffs(storageId, Seq((sequenceNr, diff))) } def dropStorageDiffs(storageId: StorageId, sequenceNrs: Set[SequenceNr]): Unit = { val preDel = globalIndex.chunks val regionKeys = globalIndex.diffs.keys .filter(rk ⇒ rk.storageId == storageId && sequenceNrs.contains(rk.sequenceNr)) .toSet globalIndex.delete(regionKeys) val deleted = globalIndex.chunks.diff(preDel).deletedChunks deleted.foreach(chunksTracker.storages.state.unregisterChunk(storageId, _)) sc.eventStreams.publishRegionEvent(regionId, RegionEvents.IndexDeleted(regionKeys)) } def dropStorageDiffs(storageId: StorageId): Unit = { globalIndex.delete(globalIndex.diffs.keys.filter(_.storageId == storageId).toSet) } } object io { def synchronize(storage: RegionStorage): Future[SyncReport] = { val future = 
(storage.dispatcher ? StorageIndex.Envelope(regionId, RegionIndex.Synchronize))(sc.config.timeouts.synchronize) RegionIndex.Synchronize.unwrapFuture(future) } def getIndex(storage: RegionStorage): Future[IndexMerger.State[SequenceNr]] = { RegionIndex.GetIndex.unwrapFuture(storage.dispatcher ? StorageIndex.Envelope(regionId, RegionIndex.GetIndex)) } def writeIndex(storage: RegionStorage, diff: IndexDiff): Unit = { log.debug("Writing index to {}: {}", storage.id, diff) storage.dispatcher ! StorageIndex.Envelope(regionId, WriteDiff(diff)) } def writeIndex(diff: IndexDiff)(implicit storageSelector: StorageSelector): Seq[RegionStorage] = { log.debug("Writing region index diff: {}", diff) val storages = storageSelector.forIndexWrite(diff) if (storages.isEmpty) { log.warning("No index storages available on {}", regionId) } else { if (log.isDebugEnabled) { log.debug("Writing to virtual region [{}] index: {} (storages = [{}])", regionId, diff, Utils.printValues(storages.map(_.id))) } storages.foreach(_.dispatcher ! StorageIndex.Envelope(regionId, WriteDiff(diff))) } storages } } } // ----------------------------------------------------------------------- // Local index operations // ----------------------------------------------------------------------- object indexes { private[this] val indexScopeCache = mutable.WeakHashMap.empty[IndexScope, IndexMerger[RegionKey]] def chunks(scope: IndexScope = IndexScope.default): ChunkIndex = { val index = this.withScope(scope) index.chunks.patch(index.pending.chunks) } def folders(scope: IndexScope = IndexScope.default): FolderIndex = { val index = this.withScope(scope) index.folders.patch(index.pending.folders) } def withScope(scope: IndexScope): IndexMerger[RegionKey] = scope match { case IndexScope.UntilSequenceNr(_) | IndexScope.UntilTime(_) ⇒ getOrCreateScopedIndex(scope) case _ ⇒ createScopedIndex(scope) } def getState(scope: IndexScope): IndexMerger.State[RegionKey] = { IndexMerger.createState(this.withScope(scope)) } def pending: IndexDiff = { globalIndex.pending } def markAsPending(diff: IndexDiff): Unit = { globalIndex.addPending(diff) } def registerChunk(chunk: Chunk): Unit = { globalIndex.addPending(IndexDiff.newChunks(chunk)) } def toMergedDiff: IndexDiff = { globalIndex.mergedDiff.merge(globalIndex.pending) } def getFileAvailability(file: File): FileAvailability = { val chunkStoragePairs = file.chunks.flatMap { chunk ⇒ chunksTracker.chunks.getChunkStatus(chunk) .toSeq .flatMap(_.availability.hasChunk) .map(_ → chunk) } val chunksByStorage = chunkStoragePairs .groupBy(_._1) .mapValues(_.map(_._2).toSet) FileAvailability(file, chunksByStorage) } private[this] def createScopedIndex(scope: IndexScope): IndexMerger[RegionKey] = { IndexMerger.scopedView(globalIndex, scope) } private[this] def getOrCreateScopedIndex(scope: IndexScope): IndexMerger[RegionKey] = { indexScopeCache.getOrElseUpdate(scope, createScopedIndex(scope)) } } }
Karasiq/shadowcloud
core/src/main/scala/com/karasiq/shadowcloud/actors/internal/RegionIndexTracker.scala
Scala
apache-2.0
7,374
/* Copyright 2009-2011 Jay Conrod * * This file is part of Tungsten. * * Tungsten is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 2 of * the License, or (at your option) any later version. * * Tungsten is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with Tungsten. If not, see * <http://www.gnu.org/licenses/>. */ package tungsten.llvm import tungsten.Utilities._ import Utilities._ class Module(val targetDataLayout: Option[String], val targetTriple: Option[String], val definitions: Map[String, Definition]) { override def equals(that: Any): Boolean = { that match { case m: Module if targetDataLayout == m.targetDataLayout && targetTriple == m.targetTriple && definitions == m.definitions => true case _ => false } } override def hashCode: Int = hash("Module", targetDataLayout, targetTriple, definitions) override def toString = { val dataLayoutStr = targetDataLayout match { case Some(dl) => "target datalayout = %s\\n".format(escapeString(dl)) case None => "" } val tripleStr = targetTriple match { case Some(t) => "target triple = %s\\n".format(escapeString(t)) case None => "" } val buffer = new StringBuilder(dataLayoutStr + tripleStr + "\\n") definitions.values.collect { case s: Struct => s }.foreach { s => buffer.append(s + "\\n\\n") } definitions.values.collect { case g: Global => g }.foreach { g => buffer.append(g + "\\n\\n") } definitions.values.collect { case f: Function => f }.foreach { f => buffer.append(f + "\\n\\n") } buffer.append("\\n") buffer.toString } }
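// --- Illustrative sketch (not part of the upstream file) ---
// Module overrides equals/hashCode structurally, so two modules assembled from the
// same pieces compare equal and hash alike; the target strings below are made-up examples.
object ModuleSketch {
  val a = new Module(Some("e-p:64:64:64"), Some("x86_64-pc-linux-gnu"), Map.empty)
  val b = new Module(Some("e-p:64:64:64"), Some("x86_64-pc-linux-gnu"), Map.empty)
  val structurallyEqual: Boolean = a == b && a.hashCode == b.hashCode // true
}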
jayconrod/tungsten
llvm/src/main/scala/tungsten/llvm/Module.scala
Scala
gpl-2.0
2,099
package controllers

import java.time.OffsetDateTime

import play.api.mvc.{Action, AnyContent}

import controllers.sugar.Requests.AuthRequest
import form.OreForms
import ore.data.user.notification.NotificationType
import ore.db.access.ModelView
import ore.db.impl.OrePostgresDriver.api._
import ore.db.impl.schema.{OrganizationMembersTable, OrganizationRoleTable, OrganizationTable, UserTable}
import ore.db.{DbRef, Model}
import ore.markdown.MarkdownRenderer
import ore.models.admin.{Message, Review}
import ore.models.project.{Project, ReviewState, Version}
import ore.models.user.{LoggedActionType, LoggedActionVersion, Notification, User}
import ore.permission.Permission
import ore.permission.role.Role
import util.UserActionLogger
import util.syntax._
import views.{html => views}

import cats.data.NonEmptyList
import cats.syntax.all._
import io.circe.Json
import slick.lifted.{Rep, TableQuery}
import zio.interop.catz._
import zio.{UIO, ZIO}

/**
  * Controller for handling Review related actions.
  */
final class Reviews(forms: OreForms)(
    implicit oreComponents: OreControllerComponents,
    renderer: MarkdownRenderer
) extends OreBaseController {

  def showReviews(author: String, slug: String, versionString: String): Action[AnyContent] =
    Authenticated.andThen(PermissionAction(Permission.Reviewer)).andThen(ProjectAction(author, slug)).asyncF {
      implicit request =>
        for {
          version <- getVersion(request.project, versionString)
          dbio = version
            .mostRecentReviews(ModelView.raw(Review))
            .joinLeft(TableQuery[UserTable])
            .on(_.userId === _.id)
            .map(t => t._1 -> t._2.map(_.name))
            .result
          rv <- service.runDBIO(dbio)
        } yield {
          val unfinished = rv.map(_._1).filter(_.endedAt.isEmpty).sorted(Review.ordering2).headOption
          Ok(views.users.admin.reviews(Model.unwrapNested(unfinished), rv, request.project, version))
        }
    }

  def createReview(author: String, slug: String, versionString: String): Action[AnyContent] = {
    Authenticated.andThen(PermissionAction(Permission.Reviewer)).asyncF { implicit request =>
      getProjectVersion(author, slug, versionString).flatMap { version =>
        val review = Review(
          version.id,
          request.user.id,
          None,
          Json.obj()
        )
        this.service.insert(review).as(Redirect(routes.Reviews.showReviews(author, slug, versionString)))
      }
    }
  }

  def reopenReview(author: String, slug: String, versionString: String): Action[AnyContent] = {
    Authenticated.andThen(PermissionAction(Permission.Reviewer)).asyncF { implicit request =>
      for {
        version <- getProjectVersion(author, slug, versionString)
        review  <- version.mostRecentReviews(ModelView.now(Review)).one.toZIOWithError(notFound)
        _ <- service.update(version)(
          _.copy(
            reviewState = ReviewState.Unreviewed,
            approvedAt = None,
            reviewerId = None
          )
        )
        newReview <- service.update(review)(_.copy(endedAt = None))
        _         <- newReview.addMessage(Message("Reopened the review", System.currentTimeMillis(), "start"))
      } yield Redirect(routes.Reviews.showReviews(author, slug, versionString))
    }
  }

  def stopReview(author: String, slug: String, versionString: String): Action[String] = {
    Authenticated
      .andThen(PermissionAction(Permission.Reviewer))
      .asyncF(parse.form(forms.ReviewDescription)) { implicit request =>
        for {
          version   <- getProjectVersion(author, slug, versionString)
          review    <- version.mostRecentUnfinishedReview(ModelView.now(Review)).toZIOWithError(notFound)
          newReview <- service.update(review)(_.copy(endedAt = Some(OffsetDateTime.now())))
          _         <- newReview.addMessage(Message(request.body.trim, System.currentTimeMillis(), "stop"))
        } yield Redirect(routes.Reviews.showReviews(author, slug,
versionString)) } } def approveReview(author: String, slug: String, versionString: String): Action[AnyContent] = { Authenticated.andThen(PermissionAction(Permission.Reviewer)).asyncF { implicit request => for { project <- getProject(author, slug) version <- getVersion(project, versionString) review <- version.mostRecentUnfinishedReview(ModelView.now(Review)).toZIOWithError(notFound) _ <- ( service.update(review)(_.copy(endedAt = Some(OffsetDateTime.now()))), // send notification that review happened sendReviewNotification(project, version) ).parTupled } yield Redirect(routes.Reviews.showReviews(author, slug, versionString)) } } private def queryNotificationUsers( projectId: Rep[DbRef[Project]], userId: Rep[Option[DbRef[User]]] ): Query[(Rep[DbRef[User]], Rep[Option[Role]]), (DbRef[User], Option[Role]), Seq] = { // Query Orga Members val q1 = for { org <- TableQuery[OrganizationTable] if org.id === projectId members <- TableQuery[OrganizationMembersTable] if org.id === members.organizationId roles <- TableQuery[OrganizationRoleTable] if members.userId === roles.userId // TODO roletype lvl in database? users <- TableQuery[UserTable] if members.userId === users.id } yield (users.id, roles.roleType.?) // Query version author val q2 = for { user <- TableQuery[UserTable] if user.id === userId } yield (user.id, None: Rep[Option[Role]]) q1 ++ q2 // Union } private lazy val notificationUsersQuery = Compiled(queryNotificationUsers _) private def sendReviewNotification( project: Model[Project], version: Version ): UIO[Unit] = { val usersF = service.runDBIO(notificationUsersQuery((project.id, version.authorId)).result).map { list => list.collect { case (res, Some(role)) if role.permissions.has(Permission.EditVersion) => res case (res, None) => res } } usersF .map { users => users.map { userId => Notification( userId = userId, notificationType = NotificationType.VersionReviewed, messageArgs = NonEmptyList.of("notification.project.reviewed", project.slug, version.versionString) ) } } .flatMap(service.bulkInsert(_).unit) } def takeoverReview(author: String, slug: String, versionString: String): Action[String] = { Authenticated .andThen(PermissionAction(Permission.Reviewer)) .asyncF(parse.form(forms.ReviewDescription)) { implicit request => for { version <- getProjectVersion(author, slug, versionString) _ <- { // Close old review val closeOldReview = version .mostRecentUnfinishedReview(ModelView.now(Review)) .toZIO .flatMap { oldreview => ( oldreview.addMessage(Message(request.body.trim, System.currentTimeMillis(), "takeover")), service.update(oldreview)(_.copy(endedAt = Some(OffsetDateTime.now()))) ).parTupled.unit } .either .map(_.merge) // Then make new one ( closeOldReview, this.service.insert( Review( version.id, request.user.id, None, Json.obj() ) ) ).parTupled } } yield Redirect(routes.Reviews.showReviews(author, slug, versionString)) } } def editReview(author: String, slug: String, versionString: String, reviewId: DbRef[Review]): Action[String] = { Authenticated .andThen(PermissionAction(Permission.Reviewer)) .asyncF(parse.form(forms.ReviewDescription)) { implicit request => for { version <- getProjectVersion(author, slug, versionString) review <- version.reviewById(reviewId).toZIOWithError(notFound) _ <- review.addMessage(Message(request.body.trim)) } yield Ok("Review" + review) } } def addMessage(author: String, slug: String, versionString: String): Action[String] = { Authenticated.andThen(PermissionAction(Permission.Reviewer)).asyncF(parse.form(forms.ReviewDescription)) { implicit request => 
for { version <- getProjectVersion(author, slug, versionString) recentReview <- version .mostRecentUnfinishedReview(ModelView.now(Review)) .toZIOWithError(Ok("Review")) currentUser <- users.current.toZIOWithError(Ok("Review")) _ <- { if (recentReview.userId == currentUser.id.value) { recentReview.addMessage(Message(request.body.trim)) } else UIO.succeed(0) } } yield Ok("Review") } } def backlogToggle(author: String, slug: String, versionString: String): Action[AnyContent] = { Authenticated.andThen(PermissionAction[AuthRequest](Permission.Reviewer)).asyncF { implicit request => for { version <- getProjectVersion(author, slug, versionString) oldState <- ZIO.fromEither( Either.cond( Seq(ReviewState.Backlog, ReviewState.Unreviewed).contains(version.reviewState), version.reviewState, BadRequest("Invalid state for toggle backlog") ) ) newState = oldState match { case ReviewState.Unreviewed => ReviewState.Backlog case ReviewState.Backlog => ReviewState.Unreviewed case _ => oldState } _ <- UserActionLogger.log( request, LoggedActionType.VersionReviewStateChanged, version.id, newState.toString, oldState.toString )(LoggedActionVersion(_, Some(version.projectId))) _ <- service.update(version)(_.copy(reviewState = newState)) } yield Redirect(routes.Reviews.showReviews(author, slug, versionString)) } } }
SpongePowered/Ore
ore/app/controllers/Reviews.scala
Scala
mit
10,219
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest

import org.scalactic.Prettifier
import scala.collection.mutable.ListBuffer
import collection.immutable.TreeMap

/**
 * Sub-trait of <code>Assertions</code> that overrides the <code>assert</code> and <code>assume</code> methods to include
 * a diagram showing the values of each expression in the error message when the assertion or assumption fails.
 *
 * Here are some examples:
 *
 * <pre class="stREPL">
 * scala&gt; import DiagrammedAssertions._
 * import DiagrammedAssertions._
 *
 * scala&gt; assert(a == b || c &gt;= d)
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(a == b || c &gt;= d)
 *        | |  | |  | |  |
 *        1 |  2 |  3 |  4
 *          |    |    false
 *          |    false
 *          false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(xs.exists(_ == 4))
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(xs.exists(_ == 4))
 *        |  |
 *        |  false
 *        List(1, 2, 3)
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert("hello".startsWith("h") &amp;&amp; "goodbye".endsWith("y"))
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert("hello".startsWith("h") &amp;&amp; "goodbye".endsWith("y"))
 *        |       |          |    |  |         |        |
 *        "hello" true       "h"  |  "goodbye" false    "y"
 *                                false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(num.isInstanceOf[Int])
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(num.isInstanceOf[Int])
 *        |   |
 *        1.0 false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(Some(2).isEmpty)
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(Some(2).isEmpty)
 *        |    |  |
 *        |    2  false
 *        Some(2)
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(None.isDefined)
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(None.isDefined)
 *        |    |
 *        None false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(xs.exists(i =&gt; i &gt; 10))
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(xs.exists(i =&gt; i &gt; 10))
 *        |  |
 *        |  false
 *        List(1, 2, 3)
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * </pre>
 *
 * <p>
 * If the expression passed to <code>assert</code> or <code>assume</code> spans more than one line, <code>DiagrammedAssertions</code> falls
 * back to the default style of error message, since drawing a diagram would be difficult. Here's an example showing how
 * <code>DiagrammedAssertions</code> will treat a multi-line assertion (<em>i.e.</em>, you don't get a diagram):
 * </p>
 *
 * <pre class="stREPL">
 * scala&gt; assert("hello".startsWith("h") &amp;&amp;
 *      | "goodbye".endsWith("y"))
 * org.scalatest.exceptions.TestFailedException: "hello" started with "h", but "goodbye" did not end with "y"
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 * </pre>
 *
 * <p>
 * Also, since an expression diagram essentially represents multi-line ascii art, if a clue string is provided, it appears <em>above</em> the diagram, not after it. It will often also show up in the diagram:
 * </p>
 *
 * <pre class="stREPL">
 * scala&gt; assert(None.isDefined, "Don't do this at home")
 * org.scalatest.exceptions.TestFailedException: Don't do this at home
 *
 * assert(None.isDefined, "Don't do this at home")
 *        |    |
 *        None false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 *
 * scala&gt; assert(None.isDefined,
 *      | "Don't do this at home")
 * org.scalatest.exceptions.TestFailedException: Don't do this at home
 *
 * assert(None.isDefined,
 *        |    |
 *        None false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * ...
 * </pre>
 *
 * <p>Trait <code>DiagrammedAssertions</code> was inspired by Peter Niederwieser's work in <a href="http://code.google.com/p/spock/">Spock</a> and <a href="https://github.com/pniederw/expecty">Expecty</a>.
 */
trait DiagrammedAssertions extends Assertions {

  import language.experimental.macros

  /**
   * Helper class used by code generated by the overridden <code>assert</code> macro.
   */
  class DiagrammedAssertionsHelper {

    // this is taken from expecty
    private[this] def fits(line: StringBuilder, str: String, anchor: Int): Boolean =
      line.slice(anchor, anchor + str.length + 1).forall(_.isWhitespace)

    // this is taken from expecty
    private[this] def placeString(line: StringBuilder, str: String, anchor: Int) {
      val diff = anchor - line.length
      for (i <- 1 to diff) line.append(' ')
      line.replace(anchor, anchor + str.length(), str)
    }

    // this is taken from expecty and modified
    private[this] def renderValue(value: Any): String = {
      value match {
        case aEqualizer: org.scalactic.EqualityPolicy#Equalizer[_] => Prettifier.default(aEqualizer.leftSide)
        case aEqualizer: org.scalactic.EqualityPolicy#CheckingEqualizer[_] => Prettifier.default(aEqualizer.leftSide)
        case _ => Prettifier.default(value)
      }
    }

    // this is taken from expecty
    private[this] def placeValue(lines: ListBuffer[StringBuilder], value: Any, col: Int) {
      val str = renderValue(value)

      placeString(lines(0), "|", col)

      for (line <- lines.drop(1)) {
        if (fits(line, str, col)) {
          placeString(line, str, col)
          return
        }
        placeString(line, "|", col)
      }

      val newLine = new StringBuilder()
      placeString(newLine, str, col)
      lines.append(newLine)
    }

    // this is taken from expecty
    private[this] def filterAndSortByAnchor(anchorValues: List[AnchorValue]): Traversable[AnchorValue] = {
      var map = TreeMap[Int, AnchorValue]()(Ordering.by(-_))
      // values stemming from compiler generated code often have the same anchor as regular values
      // and get recorded before them; let's filter them out
      for (value <- anchorValues) if (!map.contains(value.anchor)) map += (value.anchor -> value)
      map.values
    }

    // this is taken from expecty
    private[this] def renderDiagram(sourceText: String, anchorValues: List[AnchorValue]): String = {
      val offset = sourceText.prefixLength(_.isWhitespace)

      val intro = new StringBuilder().append(sourceText.trim())
      val lines = ListBuffer(new StringBuilder)

      val rightToLeft = filterAndSortByAnchor(anchorValues)
      for (anchorValue <- rightToLeft) placeValue(lines, anchorValue.value, anchorValue.anchor - offset)

      lines.prepend(intro)
      lines.append(new StringBuilder)

      lines.mkString(Prettifier.lineSeparator)
    }

    /**
     * Assert that the passed in <code>Bool</code> is <code>true</code>, else fail with <code>TestFailedException</code>
     * with an error message that includes a diagram showing expression values.
     *
     * @param bool the <code>Bool</code> to assert for
     * @param clue optional clue to be included in <code>TestFailedException</code>'s error message when the assertion fails
     */
    def macroAssert(bool: DiagrammedExpr[Boolean], clue: Any, sourceText: String) {
      if (clue == null)
        throw new NullPointerException("clue was null")
      if (!bool.value) {
        val failureMessage =
          Some(clue + Prettifier.lineSeparator + Prettifier.lineSeparator + renderDiagram(sourceText, bool.anchorValues))
        throw newAssertionFailedException(failureMessage, None, "Assertions.scala", "macroAssert", 2)
      }
    }

    /**
     * Assume that the passed in <code>Bool</code> is <code>true</code>, else throw <code>TestCanceledException</code>
     * with an error message that includes a diagram showing expression values.
     *
     * @param bool the <code>Bool</code> to assume for
     * @param clue optional clue to be included in <code>TestCanceledException</code>'s error message when the assumption fails
     */
    def macroAssume(bool: DiagrammedExpr[Boolean], clue: Any, sourceText: String) {
      if (clue == null)
        throw new NullPointerException("clue was null")
      if (!bool.value) {
        val failureMessage =
          Some(clue + Prettifier.lineSeparator + Prettifier.lineSeparator + renderDiagram(sourceText, bool.anchorValues))
        throw newTestCanceledException(failureMessage, None, "Assertions.scala", "macroAssume", 2)
      }
    }
  }

  /**
   * Helper instance used by code generated by the overridden macro assertion.
   */
  val diagrammedAssertionsHelper = new DiagrammedAssertionsHelper

  /**
   * Assert that a boolean condition is true.
   * If the condition is <code>true</code>, this method returns normally.
   * Else, it throws <code>TestFailedException</code>.
   *
   * <p>
   * This method is implemented in terms of a Scala macro that will generate a more helpful error message that includes
   * a diagram showing expression values.
   * </p>
   *
   * <p>
   * If a multi-line <code>Boolean</code> is passed in, it will fall back to the macro implementation of <code>Assertions</code>,
   * which does not contain a diagram.
   * </p>
   *
   * @param condition the boolean condition to assert
   * @throws TestFailedException if the condition is <code>false</code>.
   */
  override def assert(condition: Boolean): Unit = macro DiagrammedAssertionsMacro.assert

  /**
   * Assert that a boolean condition, described in <code>String</code>
   * <code>message</code>, is true.
   * If the condition is <code>true</code>, this method returns normally.
   * Else, it throws <code>TestFailedException</code> with the
   * <code>String</code> obtained by invoking <code>toString</code> on the
   * specified <code>clue</code> as the exception's detail message and a
   * diagram showing expression values.
   *
   * <p>
   * If a multi-line <code>Boolean</code> is passed in, it will fall back to the macro implementation of <code>Assertions</code>,
   * which does not contain a diagram.
   * </p>
   *
   * @param condition the boolean condition to assert
   * @param clue An object whose <code>toString</code> method returns a message to include in a failure report.
   * @throws TestFailedException if the condition is <code>false</code>.
   * @throws NullPointerException if <code>message</code> is <code>null</code>.
   */
  override def assert(condition: Boolean, clue: Any): Unit = macro DiagrammedAssertionsMacro.assertWithClue

  /**
   * Assume that a boolean condition is true.
   * If the condition is <code>true</code>, this method returns normally.
   * Else, it throws <code>TestCanceledException</code>.
   *
   * <p>
   * This method is implemented in terms of a Scala macro that will generate a more helpful error message that includes
   * a diagram showing expression values.
   * </p>
   *
   * <p>
   * If a multi-line <code>Boolean</code> is passed in, it will fall back to the macro implementation of <code>Assertions</code>,
   * which does not contain a diagram.
   * </p>
   *
   * @param condition the boolean condition to assume
   * @throws TestCanceledException if the condition is <code>false</code>.
   */
  override def assume(condition: Boolean): Unit = macro DiagrammedAssertionsMacro.assume

  /**
   * Assume that a boolean condition, described in <code>String</code>
   * <code>message</code>, is true.
   * If the condition is <code>true</code>, this method returns normally.
   * Else, it throws <code>TestCanceledException</code> with the
   * <code>String</code> obtained by invoking <code>toString</code> on the
   * specified <code>clue</code> as the exception's detail message and a
   * diagram showing expression values.
   *
   * <p>
   * If a multi-line <code>Boolean</code> is passed in, it will fall back to the macro implementation of <code>Assertions</code>,
   * which does not contain a diagram.
   * </p>
   *
   * @param condition the boolean condition to assume
   * @param clue An object whose <code>toString</code> method returns a message to include in a failure report.
   * @throws TestCanceledException if the condition is <code>false</code>.
   * @throws NullPointerException if <code>message</code> is <code>null</code>.
   */
  override def assume(condition: Boolean, clue: Any): Unit = macro DiagrammedAssertionsMacro.assumeWithClue
}

/**
 * Companion object that facilitates the importing of <code>DiagrammedAssertions</code> members as
 * an alternative to mixing it in. One use case is to import <code>DiagrammedAssertions</code> members so you can use
 * them in the Scala interpreter:
 *
 * <pre class="stREPL">
 * $scala -classpath scalatest.jar
 * Welcome to Scala version 2.10.4.final (Java HotSpot(TM) Client VM, Java 1.6.0_45).
 * Type in expressions to have them evaluated.
 * Type :help for more information.
 * &nbsp;
 * scala&gt; import org.scalatest.Assertions._
 * import org.scalatest.Assertions._
 * &nbsp;
 * scala&gt; assert(1 === 2)
 * org.scalatest.exceptions.TestFailedException:
 *
 * assert(1 === 2)
 *        | |   |
 *        1 |   2
 *          false
 *
 * at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:422)
 * at org.scalatest.DiagrammedAssertions$.newAssertionFailedException(DiagrammedAssertions.scala:249)
 * at org.scalatest.DiagrammedAssertions$DiagrammedAssertionsHelper.macroAssert(DiagrammedAssertions.scala:111)
 * at .&lt;init&gt;(&lt;console&gt;:20)
 * at .&lt;clinit&gt;(&lt;console&gt;)
 * at .&lt;init&gt;(&lt;console&gt;:7)
 * at .&lt;clinit&gt;(&lt;console&gt;)
 * at $print(&lt;console&gt;)
 * at sun.reflect.NativeMethodAccessorImpl.invoke...
 * </pre>
 */
object DiagrammedAssertions extends DiagrammedAssertions
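// --- Illustrative usage sketch (not part of the upstream file) ---
// The scaladoc above documents two entry points: mix the trait into a suite, or import
// the companion's members. A minimal suite using the mixin route might look like this
// (the suite name is made up; on failure, the overridden assert macro renders a value
// diagram like the ones shown in the scaladoc):
class DiagrammedAssertionsUsageSketch extends FunSuite with DiagrammedAssertions {
  test("a diagrammed assertion") {
    val xs = List(1, 2, 3)
    assert(xs.contains(2) || xs.isEmpty)
  }
}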
SRGOM/scalatest
scalatest/src/main/scala/org/scalatest/DiagrammedAssertions.scala
Scala
apache-2.0
14,940
package actors import akka.actor.{Actor, Props} import play.api.libs.iteratee.Concurrent import play.api.libs.json.JsValue object WebSocketActor { def props(channel: Concurrent.Channel[JsValue]):Props = Props(new WebSocketActor(channel)) } class WebSocketActor(channel: Concurrent.Channel[JsValue]) extends Actor { def receive = { case x: JsValue => channel.push(x) } }
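// --- Illustrative wiring sketch (hypothetical names, not part of the upstream file) ---
// Concurrent.broadcast gives an (Enumerator, Channel) pair: the enumerator would feed the
// WebSocket's outbound side, while the channel is handed to the actor so that every
// JsValue message the actor receives is pushed to all connected clients.
import akka.actor.ActorSystem
import play.api.libs.json.Json

object WebSocketWiringSketch {
  val system = ActorSystem("websocket-sketch")
  val (out, channel) = Concurrent.broadcast[JsValue]
  val socketActor = system.actorOf(WebSocketActor.props(channel))

  // Any JsValue sent to the actor ends up on the channel (and thus on the socket):
  socketActor ! Json.obj("event" -> "ping")
}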
JetChat/JetChat
app/actors/WebSocketActor.scala
Scala
apache-2.0
382
package org.talend.jira.test import net.liftweb.json._ import scala.io.Source import org.talend.jira.internal.JiraParser import org.talend.jira.JiraIssue object TestParseLinkedIssues { val files = Array(("pmdq256.json", "PMDQ-256"), ("TDQ-9794.json", "TDQ-9794"), ("TDQ-10045.json", "TDQ-10045"), ("QAI-43445.json", "QAI-43445")) def main(args: Array[String]): Unit = { files.foreach(s => { val filename = "./" + s._1 val jsonSrc = Source.fromFile(filename).getLines.mkString val query = new JiraParser(); val jiraIssue = new JiraIssue(s._2) // can put any jira name here actually // val issueTypes = List("New Feature", "Bug","Work item") query.extractJiraIssueFromJson(jiraIssue, jsonSrc) jiraIssue.linkedIssues.foreach(i => println("Issue: " + s._2 + " linked to " + i.issueKey)) }) } }
scorreia/jira-tools
src/test/scala/org/talend/jira/test/TestParseLinkedIssues.scala
Scala
apache-2.0
863
package com.github.usql import com.github.usql.ASTNode._ import com.github.kmizu.scomb import com.github.kmizu.scomb.{Result, SCombinator} import scala.util.matching.Regex import scala.collection.mutable /** * @author Kota Mizushima */ class Parser extends Pass[String, ASTNode.USQL] { private object USQLParsers extends SCombinator[ASTNode.USQL] { def publicLocations: mutable.Map[Int, scomb.Location] = locations implicit def stringToParser(literal: String): Parser[String] = $(literal) implicit def regexToParser(literal: Regex): Parser[String] = regularExpression(literal) def %% : Parser[SourceLocation] = % ^^ { l => SourceLocation(l.line, l.column) } def commit[T](parser: Parser[T]): Parser[T] = parser.commit def keyword(name: String): Parser[String] = { KEYWORDS += name token(name) } lazy val qident: Parser[String] = (regularExpression("""'[A-Za-z_][a-zA-Z0-9_]*""".r).filter{n => !KEYWORDS(n) }) << SPACING_WITHOUT_LF lazy val sident: Parser[String] = (regularExpression("""[A-Za-z_][a-zA-Z0-9_]*""".r).filter{n => !KEYWORDS(n) }) << SPACING_WITHOUT_LF lazy val KEYWORDS: mutable.Set[String] = mutable.Set() lazy val LINEFEED: Parser[String] = ("\r\n" | "\r" | "\n") lazy val SEMICOLON: Parser[String] = ";" lazy val ANY: Parser[String] = any ^^ { _.toString } lazy val SPACING: Parser[String] = rule { (COMMENT | "\r\n" | "\r" | "\n" | " " | "\t" | "\b" | "\f").* ^^ { _.mkString } } lazy val SPACING_WITHOUT_LF: Parser[String] = rule { (COMMENT | "\t" | " " | "\b" | "\f").* ^^ { _.mkString } } lazy val TERMINATOR: Parser[String] = rule { (LINEFEED | SEMICOLON | EOF) << SPACING } lazy val SEPARATOR: Parser[String] = rule { (LINEFEED | COMMA | EOF | SPACING_WITHOUT_LF) << SPACING } lazy val BLOCK_COMMENT: Parser[Any] = rule { "/*" ~ (not("*/") ~ (BLOCK_COMMENT | ANY)).* ~ "*/" } lazy val LINE_COMMENT: Parser[Any] = rule { "//" ~ (not(LINEFEED) ~ ANY).* ~ LINEFEED } lazy val COMMENT: Parser[Any] = rule { BLOCK_COMMENT | LINE_COMMENT } def CL[T](parser: Parser[T]): Parser[T] = parser << SPACING override def token(parser: String): Parser[String] = parser << SPACING_WITHOUT_LF def unescape(input: String): String = { val builder = new java.lang.StringBuilder val length = input.length var i = 0 while (i < length - 1) { (input.charAt(i), input.charAt(i + 1)) match { case ('\\', 'r') => builder.append('\r'); i += 2 case ('\\', 'n') => builder.append('\n'); i += 2 case ('\\', 'b') => builder.append('\b'); i += 2 case ('\\', 'f') => builder.append('\f'); i += 2 case ('\\', 't') => builder.append('\t'); i += 2 case ('\\', '\\') => builder.append('\\'); i += 2 case (ch, _) => builder.append(ch); i += 1 } } if (i == length - 1) { builder.append(input.charAt(i)) } new String(builder) } lazy val LT : Parser[String] = keyword("<") lazy val GT : Parser[String] = keyword(">") lazy val LTE : Parser[String] = keyword("<=") lazy val GTE : Parser[String] = keyword(">=") lazy val EQEQ : Parser[String] = keyword("==") lazy val NOT_EQ : Parser[String] = keyword("!=") lazy val PLUS : Parser[String] = keyword("+") lazy val MINUS : Parser[String] = keyword("-") lazy val STAR : Parser[String] = keyword("*") lazy val SLASH : Parser[String] = keyword("/") lazy val PERCENT : Parser[String] = keyword("%") lazy val LPAREN : Parser[String] = keyword("(") lazy val RPAREN : Parser[String] = keyword(")") lazy val LBRACE : Parser[String] = keyword("{") lazy val RBRACE : Parser[String] = keyword("}") lazy val LBRACKET : Parser[String] = keyword("[") lazy val RBRACKET : Parser[String] = keyword("]") lazy val IF : Parser[String] = 
keyword("if") lazy val ELSE : Parser[String] = keyword("else") lazy val TRUE : Parser[String] = keyword("true") lazy val FALSE : Parser[String] = keyword("false") lazy val NULL : Parser[String] = keyword("null") lazy val IN : Parser[String] = keyword("in") lazy val DEFINE : Parser[String] = keyword("define") lazy val TABLE : Parser[String] = keyword("table") lazy val VAL : Parser[String] = keyword("val") lazy val VAR : Parser[String] = keyword("var") lazy val TRANSACTTION : Parser[String] = keyword("transaction") lazy val PRIMARY_KEY : Parser[String] = keyword("primary_key") lazy val COMMIT : Parser[String] = keyword("commit") lazy val COMMA : Parser[String] = keyword(",") lazy val DOT : Parser[String] = keyword(".") lazy val EQ : Parser[String] = keyword("=") lazy val COLON : Parser[String] = keyword(":") lazy val QUES : Parser[String] = keyword("?") lazy val AMP2 : Parser[String] = keyword("&&") lazy val BAR2 : Parser[String] = keyword("||") lazy val BAR : Parser[String] = keyword("|") override def root: Parser[ASTNode.USQL] = QUERY lazy val column: Parser[ASTNode.ColumnDescription] = ??? lazy val transaction: Parser[ASTNode.TransactionClause] = for { location <- %% _ <- CL(TRANSACTTION) _ <- LBRACE cs <- TOP.repeat0By(TERMINATOR) _ <- RBRACE } yield TransactionClause(location, cs) lazy val QUERY: Parser[ASTNode.USQL] = (%% ~ TOP.*).map { case l ~ cs => ASTNode.USQL(l, cs)} lazy val TOP: Parser[ASTNode.Clause] = DDL | DML | DCL lazy val DDL: Parser[ASTNode.Clause] = ??? lazy val DML: Parser[ASTNode.Clause] = ??? lazy val DCL: Parser[ASTNode.Clause] = transaction } override def name: String = "Parser" override def process(input: String): ASTNode.USQL = ??? }
usql/usql
src/main/scala/com/github/usql/Parser.scala
Scala
mit
6,394
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // scalastyle:off println package org.apache.spark.examples.ml // $example on$ import org.apache.spark.ml.feature.VectorAssembler import org.apache.spark.ml.linalg.Vectors // $example off$ import org.apache.spark.sql.SparkSession object VectorAssemblerExample { def main(args: Array[String]): Unit = { val spark = SparkSession .builder .appName("VectorAssemblerExample") .getOrCreate() // $example on$ val dataset = spark.createDataFrame( Seq((0, 18, 1.0, Vectors.dense(0.0, 10.0, 0.5), 1.0)) ).toDF("id", "hour", "mobile", "userFeatures", "clicked") val assembler = new VectorAssembler() .setInputCols(Array("hour", "mobile", "userFeatures")) .setOutputCol("features") val output = assembler.transform(dataset) println("Assembled columns 'hour', 'mobile', 'userFeatures' to vector column 'features'") output.select("features", "clicked").show(false) // $example off$ spark.stop() } } // scalastyle:on println
alec-heif/MIT-Thesis
spark-bin/examples/src/main/scala/org/apache/spark/examples/ml/VectorAssemblerExample.scala
Scala
mit
1,798
/* Copyright (c) 2013-2016 Karol M. Stasiak Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package io.github.karols.units import language.higherKinds import language.implicitConversions import language.existentials import io.github.karols.units.internal.Bools._ import io.github.karols.units.internal.Integers._ import io.github.karols.units.internal.Strings._ import io.github.karols.units.internal.SingleUnits._ import io.github.karols.units.internal.UnitImpl._ import io.github.karols.units.internal.Conversions._ import scala.math /** Supertype of all units of measure. */ trait MUnit { /** @see io.github.karols.units./ @see [[io.github.karols.units._1]] */ type Invert <: MUnit type Get[U<:TSingleUnit] <:TInteger type MulSingle[S<:TUnitPowerPair] <: MUnit /** @see io.github.karols.units.× */ type Mul[S<:MUnit] <: MUnit type Sqrt <:MUnit type Cbrt <:MUnit /** @see [[io.github.karols.units.square]] */ type IsSquare <: TBool /** @see [[io.github.karols.units.cube]] */ type IsCube <: TBool type ToPower[Exp<:TInteger] <: MUnit type Substitute[S<:TSingleUnit, V<:MUnit] <: MUnit }
KarolS/units
units/src/main/scala/io/github/karols/units/MUnit.scala
Scala
mit
2,090
package me.heaton.profun.week2.homework import org.scalatest.FunSuite import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner /** * This class is a test suite for the methods in object FunSets. To run * the test suite, you can either: * - run the "test" command in the SBT console * - right-click the file in eclipse and chose "Run As" - "JUnit Test" */ @RunWith(classOf[JUnitRunner]) class FunSetSuite extends FunSuite { /** * Link to the scaladoc - very clear and detailed tutorial of FunSuite * * http://doc.scalatest.org/1.9.1/index.html#org.scalatest.FunSuite * * Operators * - test * - ignore * - pending */ /** * Tests are written using the "test" operator and the "assert" method. */ test("string take") { val message = "hello, world" assert(message.take(5) == "hello") } /** * For ScalaTest tests, there exists a special equality operator "===" that * can be used inside "assert". If the assertion fails, the two values will * be printed in the error message. Otherwise, when using "==", the test * error message will only say "assertion failed", without showing the values. * * Try it out! Change the values so that the assertion fails, and look at the * error message. */ test("adding ints") { assert(1 + 2 === 3) } import FunSets._ test("contains is implemented") { assert(contains(x => true, 100)) } /** * When writing tests, one would often like to re-use certain values for multiple * tests. For instance, we would like to create an Int-set and have multiple test * about it. * * Instead of copy-pasting the code for creating the set into every test, we can * store it in the test class using a val: * * val s1 = singletonSet(1) * * However, what happens if the method "singletonSet" has a bug and crashes? Then * the test methods are not even executed, because creating an instance of the * test class fails! * * Therefore, we put the shared values into a separate trait (traits are like * abstract classes), and create an instance inside each test method. * */ trait TestSets { val s1 = singletonSet(1) val s2 = singletonSet(2) val s3 = singletonSet(3) val s12 = union(s1, s2) val s23 = union(s2, s3) } /** * This test is currently disabled (by using "ignore") because the method * "singletonSet" is not yet implemented and the test would fail. * * Once you finish your implementation of "singletonSet", exchange the * function "ignore" by "test". */ test("singletonSet(1) contains 1") { /** * We create a new instance of the "TestSets" trait, this gives us access * to the values "s1" to "s3". */ new TestSets { /** * The string argument of "assert" is a message that is printed in case * the test fails. This helps identifying which assertion failed. 
       */
      assert(contains(s1, 1), "Singleton")
    }
  }

  test("union contains all elements") {
    new TestSets {
      val s = union(s1, s2)
      assert(contains(s, 1), "Union 1")
      assert(contains(s, 2), "Union 2")
      assert(!contains(s, 3), "Union 3")
    }
  }

  test("intersect contains only the elements present in both sets") {
    new TestSets {
      val s = intersect(s12, s23)
      assert(!contains(s, 1))
      assert(contains(s, 2))
      assert(!contains(s, 3))
    }
  }

  test("diff contains the elements of the first set that are not in the second") {
    new TestSets {
      val s = diff(s12, s23)
      assert(contains(s, 1))
      assert(!contains(s, 2))
      assert(!contains(s, 3))
    }
  }

  test("filter returns the elements of a set that are accepted by a given predicate") {
    val s1: Set = (x) => x > 0
    val s = filter(s1, (x) => x < 5)
    assert(contains(s, 1))
    assert(!contains(s, 5))
  }

  test("forall tests whether a given predicate is true for all elements of the set") {
    val s: Set = (x) => x > 0
    assert(forall(s, (x) => x > 1))
    assert(!forall(s, (x) => x > -1))
    assert(!forall(s, (x) => x < 1))
  }

  test("exists tests whether a set contains at least one element for which the given predicate is true") {
    val s: Set = (x) => x > 0
    assert(exists(s, (x) => x > 1))
    assert(exists(s, (x) => x < 2))
    assert(!exists(s, (x) => x < 1))
  }

  test("map transforms a given set into another one by applying the given function (addition) to each of its elements") {
    val s1: Set = (x) => x > 0
    val s = map(s1, (x) => x + 2)
    assert(contains(s, 3))
    assert(!contains(s, 1))
  }

  test("map transforms a given set into another one by applying the given function (multiplication) to each of its elements") {
    val s1: Set = (x) => x > 0
    val s = map(s1, (x) => x * 2)
    assert(contains(s, 2))
    assert(!contains(s, 1))
  }
}
SanCoder-Q/hello-scala
src/test/scala/me/heaton/profun/week2/homework/FunSetSuite.scala
Scala
mit
4,815
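The FunSets object that this suite exercises is not part of the record. A plausible characteristic-function implementation, written in the standard style of this exercise (assumed; the author's actual solution may differ):

object FunSets {
  // a set is represented by its characteristic function
  type Set = Int => Boolean
  // forall/exists only inspect values in [-bound, bound]
  private val bound = 1000

  def contains(s: Set, elem: Int): Boolean = s(elem)
  def singletonSet(elem: Int): Set = x => x == elem
  def union(s: Set, t: Set): Set = x => s(x) || t(x)
  def intersect(s: Set, t: Set): Set = x => s(x) && t(x)
  def diff(s: Set, t: Set): Set = x => s(x) && !t(x)
  def filter(s: Set, p: Int => Boolean): Set = x => s(x) && p(x)

  def forall(s: Set, p: Int => Boolean): Boolean = {
    def iter(a: Int): Boolean =
      if (a > bound) true
      else if (contains(s, a) && !p(a)) false
      else iter(a + 1)
    iter(-bound)
  }

  def exists(s: Set, p: Int => Boolean): Boolean = !forall(s, x => !p(x))
  def map(s: Set, f: Int => Int): Set = y => exists(s, x => f(x) == y)
}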
package com.taig.tmpltr.engine.html import com.taig.tmpltr._ import play.api.mvc.Content class table( val attributes: Attributes, val content: Content ) extends markup.table with Tag.Body[table, Content] object table extends Tag.Body.Appliable[table, Content]
Taig/Play-Tmpltr
app/com/taig/tmpltr/engine/html/table.scala
Scala
mit
263
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.examples.scala.graph import org.apache.flink.api.scala._ import org.apache.flink.examples.java.graph.util.ConnectedComponentsData import org.apache.flink.util.Collector object TransitiveClosureNaive { def main (args: Array[String]): Unit = { if (!parseParameters(args)) { return } val env = ExecutionEnvironment.getExecutionEnvironment val edges = getEdgesDataSet(env) val paths = edges.iterateWithTermination(maxIterations) { prevPaths: DataSet[(Long, Long)] => val nextPaths = prevPaths .join(edges) .where(1).equalTo(0) { (left, right) => (left._1,right._2) } .union(prevPaths) .groupBy(0, 1) .reduce((l, r) => l) val terminate = prevPaths .coGroup(nextPaths) .where(0).equalTo(0) { (prev, next, out: Collector[(Long, Long)]) => { val prevPaths = prev.toSet for (n <- next) if (!prevPaths.contains(n)) out.collect(n) } } (nextPaths, terminate) } if (fileOutput) { paths.writeAsCsv(outputPath, "\n", " ") } else { paths.print() } env.execute("Scala Transitive Closure Example") } private var fileOutput: Boolean = false private var edgesPath: String = null private var outputPath: String = null private var maxIterations: Int = 10 private def parseParameters(programArguments: Array[String]): Boolean = { if (programArguments.length > 0) { fileOutput = true if (programArguments.length == 3) { edgesPath = programArguments(0) outputPath = programArguments(1) maxIterations = Integer.parseInt(programArguments(2)) } else { System.err.println("Usage: TransitiveClosure <edges path> <result path> <max number of " + "iterations>") return false } } else { System.out.println("Executing TransitiveClosure example with default parameters and " + "built-in default data.") System.out.println(" Provide parameters to read input data from files.") System.out.println(" See the documentation for the correct format of input files.") System.out.println(" Usage: TransitiveClosure <edges path> <result path> <max number of " + "iterations>") } true } private def getEdgesDataSet(env: ExecutionEnvironment): DataSet[(Long, Long)] = { if (fileOutput) { env.readCsvFile[(Long, Long)]( edgesPath, fieldDelimiter = ' ', includedFields = Array(0, 1)) .map { x => (x._1, x._2)} } else { val edgeData = ConnectedComponentsData.EDGES map { case Array(x, y) => (x.asInstanceOf[Long], y.asInstanceOf[Long]) } env.fromCollection(edgeData) } } }
citlab/vs.msc.ws14
flink-0-7-custom/flink-examples/flink-scala-examples/src/main/scala/org/apache/flink/examples/scala/graph/TransitiveClosureNaive.scala
Scala
apache-2.0
3,623
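The Flink job above grows the set of known paths by joining it with the edge set on each iteration, stopping once no new paths appear. The same fixpoint computation, sketched over plain in-memory Scala collections for reference:

object TransitiveClosureLocal {
  def closure(edges: Set[(Long, Long)]): Set[(Long, Long)] = {
    var paths = edges
    var grown = true
    while (grown) {
      // join known paths (a, b) with edges (b, c) to derive new paths (a, c)
      val derived = for {
        (a, b) <- paths
        (b2, c) <- edges
        if b == b2
      } yield (a, c)
      val next = paths ++ derived
      grown = next.size > paths.size
      paths = next
    }
    paths
  }

  def main(args: Array[String]): Unit = {
    // yields (1,3), (1,4) and (2,4) in addition to the input edges
    println(closure(Set((1L, 2L), (2L, 3L), (3L, 4L))))
  }
}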
package ark import scala.math.BigDecimal.double2bigDecimal import ark.TrapAlign._ case class Hit(val trap: Trap, val bonusMultiplier: BigDecimal = 0.0) { def align = trap.align def damage = trap.damage def elaborate = points(Elaborate) def sadistic = points(Sadistic) def humiliating = points(Humiliating) private def points(trapAlign: TrapAlign): Int = { if (trap.align == trapAlign) { points } else { 0 } } def points = trap.points def baseMultiplier = trap.multiplier def multiplier = baseMultiplier + bonusMultiplier override def toString = s"(${trap.name}${if (bonusMultiplier == 0) "" else s""", ${bonusMultiplier}"""})" }
lucaster/ark-calc
src/main/scala/ark/Hit.scala
Scala
mit
683
/* __ *\ ** ________ ___ / / ___ __ ____ Scala.js Benchmarks ** ** / __/ __// _ | / / / _ | __ / // __/ Adam Burmister ** ** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ 2012, Google, Inc ** ** /____/\___/_/ |_/____/_/ | |__/ /____/ 2013, Jonas Fonseca ** ** |/____/ ** \* */ // The ray tracer code in this file is written by Adam Burmister. It // is available in its original form from: // // http://labs.flog.co.nz/raytracer/ // // Ported from the v8 benchmark suite by Google 2012. // Ported from the Dart benchmark_harness to Scala.js by Jonas Fonseca 2013 package org.scalajs.benchmark.tracer import org.scalajs.benchmark.dom._ // 'event' null means scalar we are benchmarking class RenderScene extends Scene { val camera = new Camera( new Vector(0.0f, 0.0f, -15.0f), new Vector(-0.2f, 0.0f, 5.0f), new Vector(0.0f, 1.0f, 0.0f) ) val background = new Background(new Color(0.5f, 0.5f, 0.5f), 0.4f) val plane = new Plane( new Vector(0.1f, 0.9f, -0.5f).normalize, 1.2f, new Chessboard( new Color(1.0f, 1.0f, 1.0f), new Color(0.0f, 0.0f, 0.0f), 0.2f, 0.0f, 1.0f, 0.7f ) ) val sphere = new Sphere( new Vector(-1.5f, 1.5f, 2.0f), 1.5f, new Solid( new Color(0.0f, 0.5f, 0.5f), 0.3f, 0.0f, 0.0f, 2.0f ) ) val sphere1 = new Sphere( new Vector(1.0f, 0.25f, 1.0f), 0.5f, new Solid( new Color(0.9f, 0.9f, 0.9f), 0.1f, 0.0f, 0.0f, 1.5f ) ) val shapes = List(plane, sphere, sphere1) var light = new Light( new Vector(5.0f, 10.0f, -1.0f), new Color(0.8f, 0.8f, 0.8f) ) var light1 = new Light( new Vector(-3.0f, 5.0f, -15.0f), new Color(0.8f, 0.8f, 0.8f), 100.0f ) val lights = List(light, light1) def renderScene(config: EngineConfiguration, canvas: CanvasRenderingContext2D): Unit = { new Engine(config).renderScene(this, canvas) } }
sjrd/scalajs-benchmarks
tracerFloat/shared/src/main/scala/org/scalajs/benchmark/tracer/RenderScene.scala
Scala
bsd-3-clause
2,204
import org.specs._ object MeetupSpec extends Specification { import dispatch._ import meetup._ import dispatch.liftjson.Js._ import oauth._ val conf = new java.io.File("meetup.test.properties") if (conf.exists) { val config = { val stm = new java.io.FileInputStream(conf) val props = new java.util.Properties props.load(stm) stm.close() props } val consumer = Consumer(config.getProperty("oauth_consumer_key"), config.getProperty("oauth_consumer_secret")) val token = Token(config.getProperty("oauth_token"), config.getProperty("oauth_token_secret")) val client = OAuthClient(consumer, token) val nyseID = "05002008" "Group Query" should { "find knitting groups in Brooklyn" in { val http = new Http val group_topics = http(client(Groups.cityUS("Brooklyn", "NY").topic("knitting")) ># ( Response.results >~> Group.topics )) group_topics.size must be > (0) group_topics forall { _.flatMap(Group.Topic.name) exists { _.toLowerCase == "knitting" } } must_== true } } "Event Query" should { implicit val http = new Http "find New York Scala events" in { import java.util.Calendar val cal = Calendar.getInstance cal.add(Calendar.YEAR, -1) val (res, meta) = Http(client.handle(Events.group_id(1377720) .after(cal.getTime) .before(new java.util.Date) )) res.size must be > (5) (meta >>= Meta.count) must_== List(res.size) } "find upcoming events" in { val (res, meta) = Http(client.handle(Events.topic("technology"))) val statuses = res flatMap Event.status statuses must notBeEmpty statuses must notExist { _ != Event.Upcoming } } } "Member and Group query" should { implicit val http = new Http "find NYSE members" in { val NYSE = "ny-scala" val (res, meta) = client.call(Members.group_urlname(NYSE)) val ids = for (r <- res; id <- Member.id(r)) yield id ids.size must be > (5) } } "Photos query" should { implicit val http = new Http "Find North East Scala Symposium photos" in { val (res, _) = client.call(Photos.event_id("15526582")) val photos = for { r <- res id <- Photo.photo_id(r) created <- Photo.created(r) updated <- Photo.updated(r) hr_link <- Photo.highres_link(r) photo_link <- Photo.photo_link(r) thumb_link <- Photo.thumb_link(r) } yield (id, created, updated, hr_link, photo_link, thumb_link) photos.size must be > 5 } } } }
cmc333333/Databinder-Dispatch
meetup/src/test/scala/MeetupSpec.scala
Scala
lgpl-2.1
2,710
package orz.mongo.tochka.dsl private[tochka] class BoolField(protected val __prefix: String = "") extends Field { protected type FieldType = Boolean }
fkmt-disk/tochka
src/main/scala/orz/mongo/tochka/dsl/BoolField.scala
Scala
mit
162
/* * Copyright 2016 rdbc contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.rdbc.pgsql.core.internal.typecodec.sco import io.rdbc.pgsql.core.SessionParams import io.rdbc.pgsql.core.types.PgVal import scodec.Codec private[sco] trait IgnoreSessionParams[T <: PgVal[_]] { this: ScodecPgValCodec[T] => def codec: Codec[T] override final def codec(sessionParams: SessionParams): Codec[T] = codec }
rdbc-io/rdbc-pgsql
rdbc-pgsql-core/src/main/scala/io/rdbc/pgsql/core/internal/typecodec/sco/IgnoreSessionParams.scala
Scala
apache-2.0
941
package ru.dgolubets.reactjs.server.actors import java.io.File import java.nio.file.{Path, WatchEvent} import java.util.concurrent.Executors import scala.concurrent.duration._ import akka.actor.{Actor, ActorRef, Cancellable, Props} import akka.util.ByteString import better.files.{File => BetterFile} import ru.dgolubets.reactjs.server.util.{FileMonitorEx, MD5} import scala.concurrent.ExecutionContext private[server] class SourcesMonitorActor(server: ActorRef, root: File, files: Seq[File], delay: FiniteDuration) extends Actor { import context.dispatcher import Messages._ import SourcesMonitorActor._ private val blockingExecutor = Executors.newCachedThreadPool() private val blockingIO: ExecutionContext = ExecutionContext.fromExecutor(blockingExecutor) private val watcher = new FileMonitorEx(root, recursive = true) { private val scheduler = context.system.scheduler private var scheduledNotification: Option[Cancellable] = None override def onEvent(eventType: WatchEvent.Kind[Path], file: BetterFile, count: Int): Unit = { for (n <- scheduledNotification) { n.cancel() } val c = scheduler.scheduleOnce(delay) { self ! FileChanged } scheduledNotification = Some(c) } } watcher.start()(blockingIO) private var filesModified: Map[File, Option[FileInfo]] = getFilesModified() notifyServer(filesModified, None) override def postStop(): Unit = { watcher.close() blockingExecutor.shutdown() } def getFilesModified(): Map[File, Option[FileInfo]] = { files.map { file => if (file.exists()) { val info = FileInfo(file.lastModified(), MD5.ofFile(file)) file -> Some(info) } else file -> None }.toMap } def notifyServer(current: Map[File, Option[FileInfo]], previous: Option[Map[File, Option[FileInfo]]]): Unit = { val missingFiles = current .collect { case (k, v) if v.isEmpty => k } .toList if (missingFiles.nonEmpty) { server ! SourcesMissing(missingFiles) } else { val updatedFiles = previous .fold(current) { prev => current.filter { case (k, Some(v)) => prev.get(k).forall(_.forall(_ != v)) case _ => false } } .keys .toList if (previous.isEmpty || updatedFiles.nonEmpty) { server ! SourcesChanged(updatedFiles) } } } override def receive: Receive = { case FileChanged => val newFilesModified = getFilesModified() val oldFilesModified = filesModified if (oldFilesModified != newFilesModified) { filesModified = newFilesModified notifyServer(newFilesModified, Some(oldFilesModified)) } } } private[server] object SourcesMonitorActor { def props(server: ActorRef, root: File, files: Seq[File], delay: FiniteDuration): Props = Props(new SourcesMonitorActor(server, root, files, delay)) object FileChanged case class FileInfo(lastModified: Long, md5: ByteString) }
DGolubets/reactjs-server
src/main/scala/ru/dgolubets/reactjs/server/actors/SourcesMonitorActor.scala
Scala
mit
3,025
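SourcesMonitorActor treats a file as changed when its (lastModified, MD5) snapshot differs from the previous one, which filters out spurious watcher events. A JDK-only sketch of that fingerprinting step (the MD5 helper used in the record is project-internal, so this stand-in is assumed):

import java.io.File
import java.nio.file.Files
import java.security.MessageDigest

object FileFingerprint {
  final case class Fingerprint(lastModified: Long, md5: Seq[Byte])

  // None mirrors the actor's handling of missing files
  def of(file: File): Option[Fingerprint] =
    if (!file.exists()) None
    else {
      val digest = MessageDigest.getInstance("MD5").digest(Files.readAllBytes(file.toPath))
      Some(Fingerprint(file.lastModified(), digest.toSeq))
    }

  // changed when the snapshot differs, including files appearing or disappearing
  def changed(prev: Option[Fingerprint], curr: Option[Fingerprint]): Boolean =
    prev != curr
}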
package net.danielkza.http2.api import akka.http.scaladsl.model.HttpHeader import scala.language.implicitConversions import akka.util.ByteString import akka.http.scaladsl.{model => akkaModel} sealed trait Header { def name: ByteString def value: ByteString def secure: Boolean } object Header extends { object Constants { final val METHOD = ByteString(":method") final val SCHEME = ByteString(":scheme") final val AUTHORITY = ByteString(":authority") final val STATUS = ByteString(":status") final val PATH = ByteString(":path") final val HOST = ByteString("Host") } import Constants._ private def encode(s: String): ByteString = ByteString.fromString(s, "UTF-8") case class RawHeader(name: ByteString, value: ByteString, secure: Boolean = false) extends Header case class WrappedAkkaHeader(akkaHeader: akkaModel.HttpHeader, secure: Boolean = false) extends Header { override val name = encode(akkaHeader.name.toLowerCase) override val value = encode(akkaHeader.value.toLowerCase) } implicit def headerFromAkka(header: HttpHeader): WrappedAkkaHeader = WrappedAkkaHeader(header, false) object WrappedAkkaHeader { implicit def unwrapAkkaHeader(wrapped: WrappedAkkaHeader): akkaModel.HttpHeader = wrapped.akkaHeader } case class WrappedAkkaStatusCode(akkaStatusCode: akkaModel.StatusCode) extends Header { override val name = STATUS override val value = ByteString(Integer.toString(akkaStatusCode.intValue)) override val secure = false } case class WrappedAkkaMethod(akkaMethod: akkaModel.HttpMethod) extends Header { override val name = METHOD override val value = ByteString(akkaMethod.value) override val secure = false } def plain(name: ByteString, value: ByteString): RawHeader = RawHeader(name, value, secure = false) def plain(name: String, value: String): RawHeader = RawHeader(ByteString(name.toLowerCase), ByteString(value.toLowerCase), secure = false) def secure(name: ByteString, value: ByteString): RawHeader = RawHeader(name, value, secure = true) def secure(name: String, value: String): RawHeader = RawHeader(ByteString(name.toLowerCase), ByteString(value.toLowerCase), secure = true) }
danielkza/h2scala
core/src/main/scala/net/danielkza/http2/api/Header.scala
Scala
apache-2.0
2,301
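A brief usage sketch of the Header API above. Note that plain and secure lower-case both name and value, and the implicit headerFromAkka conversion in Header's companion adapts akka-http headers; the akka-http RawHeader import is the only assumption beyond this record:

import akka.http.scaladsl.model.headers.{RawHeader => AkkaRawHeader}
import net.danielkza.http2.api.Header

object HeaderDemo {
  // ordinary header; name and value are lower-cased by the factory
  val accept = Header.plain("accept", "application/json")

  // flagged as sensitive, presumably so an HPACK encoder can avoid indexing it
  val auth = Header.secure("authorization", "bearer <token>")

  // adapted through the implicit conversion found in Header's companion object
  val fromAkka: Header = AkkaRawHeader("X-Request-Id", "abc123")
}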
/**
 * Copyright (c) 2013, The National Archives <[email protected]>
 * http://www.nationalarchives.gov.uk
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */
package uk.gov.nationalarchives.csv.validator.schema.v1_1

import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import uk.gov.nationalarchives.csv.validator.metadata.{Cell, Row}
import uk.gov.nationalarchives.csv.validator.schema._

import scalaz.{Failure, Success}

@RunWith(classOf[JUnitRunner])
class AnyRuleSpec extends Specification {

  "AnyRule with a string literal behaviour" should {

    val globalDirsOne = List(TotalColumns(1))
    val schema: Schema = Schema(globalDirsOne, List(ColumnDefinition(NamedColumnIdentifier("column1"))))

    "succeed if it matches the only any rule value" in {
      val anyRule = AnyRule(List(Literal(Some("hello world"))))
      anyRule.evaluate(0, Row(List(Cell("hello world")), 1), schema) mustEqual Success(true)
    }

    "succeed if it matches one of the any rule values" in {
      val anyRule = AnyRule(List(Literal(Some("hello world")), Literal(Some("value2")), Literal(Some("value3"))))
      anyRule.evaluate(0, Row(List(Cell("hello world")), 1), schema) mustEqual Success(true)
      anyRule.evaluate(0, Row(List(Cell("value2")), 2), schema) mustEqual Success(true)
      anyRule.evaluate(0, Row(List(Cell("value3")), 3), schema) mustEqual Success(true)
    }

    "fail if it doesn't match" in {
      val anyRule = AnyRule(List(Literal(Some("hello world"))))
      anyRule.evaluate(0, Row(List(Cell("hello world today")), 1), schema) must beLike {
        case Failure(messages) => messages.head mustEqual """any("hello world") fails for line: 1, column: column1, value: "hello world today""""
      }
    }

    "succeed with @ignoreCase" in {
      val anyRule = AnyRule(List(Literal(Some("hello world"))))
      anyRule.evaluate(0, Row(List(Cell("hello WORLD")), 1), Schema(globalDirsOne, List(ColumnDefinition(NamedColumnIdentifier("column1"), Nil, List(IgnoreCase()))))) mustEqual Success(true)
    }
  }
}
valydia/csv-validator
csv-validator-core/src/test/scala/uk/gov/nationalarchives/csv/validator/schema/v1_1/AnyRuleSpec.scala
Scala
mpl-2.0
2,272
package com.twitter.finagle.loadbalancer.aperture import com.twitter.finagle._ import com.twitter.finagle.loadbalancer.EndpointFactory import com.twitter.finagle.util.Rng import com.twitter.util._ import scala.collection.mutable private[loadbalancer] trait ApertureSuite { class Empty extends Exception /** * An aperture load balancer which exposes some of the internals * via proxy methods. */ trait TestBal extends Aperture[Unit, Unit] { protected val rng = Rng(12345L) protected val emptyException = new Empty protected def maxEffort = 5 protected def minAperture = 1 protected def useDeterministicOrdering = false protected[this] val maxEffortExhausted = statsReceiver.counter("max_effort_exhausted") def applyn(n: Int): Unit = { val factories = Await.result(Future.collect(Seq.fill(n)(apply()))) Await.result(Closable.all(factories:_*).close()) } // Expose some protected methods for testing def adjustx(n: Int): Unit = adjust(n) def aperturex: Int = aperture def maxUnitsx: Int = maxUnits def distx: Distributor = dist def rebuildx(): Unit = rebuild() } case class Factory(i: Int) extends EndpointFactory[Unit, Unit] { def remake() = {} def address = Address.Failed(new Exception) var _total = 0 var _outstanding = 0 var _numCloses = 0 /** * Returns the total number of services acquired via this factory. */ def total: Int = _total /** * Returns the current number of outstanding services. Services are * relinquished via calls to close. */ def outstanding: Int = _outstanding /** * The number of times close was called on the factory. */ def numCloses: Int = _numCloses /** * Clears the total number of services acquired and number of closes. */ def clear(): Unit = { _numCloses = 0 _total = 0 } def apply(conn: ClientConnection): Future[Service[Unit, Unit]] = { _total += 1 _outstanding += 1 Future.value(new Service[Unit, Unit] { def apply(unit: Unit): Future[Unit] = ??? override def close(deadline: Time): Future[Unit] = { _outstanding -= 1 Future.Done } }) } @volatile var _status: Status = Status.Open override def status: Status = _status def status_=(v: Status) { _status = v } def close(deadline: Time): Future[Unit] = { _numCloses += 1 Future.Done } override def toString: String = s"Factory(id=$i, requests=$total, status=$status)" } class Counts extends Iterable[Factory] { val factories = new mutable.HashMap[Int, Factory] def iterator = factories.values.iterator def clear(): Unit = { factories.values.foreach(_.clear()) } /** * This allows test writers to validate the number of [[Counts]] that * have received requests. After a statistically significant number * of requests sent through the balancer, the size of this collection * should be bound by the aperture size. */ def nonzero: Set[Int] = factories.filter({ case (_, f) => f.total > 0 }).keys.toSet def apply(i: Int) = factories.getOrElseUpdate(i, Factory(i)) def range(n: Int): IndexedSeq[EndpointFactory[Unit, Unit]] = Vector.tabulate(n) { i => apply(i) } } }
koshelev/finagle
finagle-core/src/test/scala/com/twitter/finagle/loadbalancer/aperture/ApertureSuite.scala
Scala
apache-2.0
3,365
package com.phasmid.laScala.cache import akka.event.LoggingAdapter import com.phasmid.laScala.fp.FP import org.scalatest._ import org.scalatest.concurrent._ import scala.language.implicitConversions import scala.util._ /** * Created by scalaprof on 3/28/16. */ class CacheSpec extends FlatSpec with Matchers with Futures with ScalaFutures { class MockLoggingAdapter extends LoggingAdapter { var debugCount = 0 var infoCount = 0 var warningCount = 0 var errorCount = 0 protected def notifyInfo(message: String): Unit = { println("info: " + message) infoCount = infoCount + 1 } override def isErrorEnabled: Boolean = false override def isInfoEnabled: Boolean = true override def isDebugEnabled: Boolean = true override protected def notifyError(message: String): Unit = { System.err.println("error: " + message) errorCount = errorCount + 1 } override protected def notifyError(cause: Throwable, message: String): Unit = { System.err.println("error: " + message + ": " + cause) errorCount = errorCount + 1 } override def isWarningEnabled: Boolean = true override protected def notifyWarning(message: String): Unit = { System.err.println("warning: " + message) warningCount = warningCount + 1 } override protected def notifyDebug(message: String): Unit = { println("debug: " + message) debugCount = debugCount + 1 } } "BasicFulfillingCache" should "succeed" in { val mockLoggingAdapter = new MockLoggingAdapter mockLoggingAdapter.debugCount shouldBe 0 mockLoggingAdapter.warningCount shouldBe 0 implicit def carper(s: String): Unit = mockLoggingAdapter.warning(s) def evaluate(k: String): Option[Int] = { mockLoggingAdapter.debug(s"evaluating $k") Try(k.toInt).toOption } val cache = BasicFulfillingCache[String, Int](evaluate) mockLoggingAdapter.warningCount shouldBe 0 cache.get("1") should matchPattern { case Some(1) => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 1 cache("1") should matchPattern { case 1 => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 1 cache.get("2") should matchPattern { case Some(2) => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 2 cache.get("A") should matchPattern { case None => } mockLoggingAdapter.warningCount shouldBe 1 cache.get("A") should matchPattern { case None => } mockLoggingAdapter.warningCount shouldBe 2 } "NonExpiringCache" should "succeed" in { val mockLoggingAdapter = new MockLoggingAdapter mockLoggingAdapter.debugCount shouldBe 0 mockLoggingAdapter.warningCount shouldBe 0 implicit def carper(s: String): Unit = mockLoggingAdapter.warning(s) def evaluate(k: String): Try[Int] = { mockLoggingAdapter.debug(s"evaluating $k") Try(k.toInt) } val cache = NonExpiringCache[String, Int](evaluate) mockLoggingAdapter.warningCount shouldBe 0 cache.get("1") should matchPattern { case Success(1) => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 1 cache("1") should matchPattern { case 1 => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 1 cache.get("2") should matchPattern { case Success(2) => } mockLoggingAdapter.warningCount shouldBe 0 mockLoggingAdapter.debugCount shouldBe 2 cache.get("A") should matchPattern { case Failure(_) => } mockLoggingAdapter.warningCount shouldBe 1 cache.get("A") should matchPattern { case Failure(_) => } mockLoggingAdapter.warningCount shouldBe 2 } "asMap" should "yield appropriate Map" in { val values = Map("x" -> 1, "y" -> 2) import Cache._ val cache = NonExpiringCache[String, Int] 
{ k => FP.optionToTry(values.get(k)) } cache("x") val map: Map[String, Int] = cache.asMap map shouldBe Map("x" -> 1) cache("y") cache.asMap shouldBe Map("x" -> 1, "y" -> 2) } }
rchillyard/LaScala
src/test/scala/com/phasmid/laScala/cache/CacheSpec.scala
Scala
lgpl-2.1
4,127
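The caches under test come from the surrounding library and are not shown in this record. A minimal sketch of the behaviour the suite pins down (fulfill on miss, cache only successes, warn via the implicit carper on every failed lookup); the class shape is assumed, not the library's implementation:

import scala.collection.mutable
import scala.util.{Failure, Success, Try}

class MiniFulfillingCache[K, V](evaluate: K => Try[V])(implicit carper: String => Unit) {
  private val store = mutable.Map.empty[K, V]

  def get(key: K): Try[V] = store.get(key) match {
    case Some(v) => Success(v) // hit: no re-evaluation, matching the debugCount assertions
    case None =>
      evaluate(key) match {
        case s @ Success(v) => store(key) = v; s
        case f @ Failure(e) => carper(s"unable to fulfill cache for key $key: $e"); f
      }
  }
}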
package scaredy.api import scaredy.data._ import scaredy.http.HttpClient import scaredy.utils.Opt import scala.concurrent.{ExecutionContext, Future} class Link(val client: HttpClient, val id: String)(implicit val ec: ExecutionContext) { import scaredy.utils.ResponseHelpers._ private implicit val _: HttpClient = client def comments(comment: Opt[String] = None, sort: Opt[String] = None, context: Opt[Int] = None, depth: Opt[Int] = None, limit: Opt[Int] = None, showEdits: Opt[Boolean] = None, showMore: Opt[Boolean] = None): Future[Seq[CommentData]] = { val params = Seq( "comment" -> comment, "context" -> context, "depth" -> depth, "limit" -> limit, "showedits" -> showEdits, "showmore" -> showMore, "sort" -> sort ) rawSeq[Thing](Endpoints.comments(id), params.asString).map(_.only[CommentData]) } def duplicates: Future[Seq[LinkData]] = { rawSeq[Thing](Endpoints.duplicates(id)).map(_.only[LinkData]) } } object Link { import scaredy.utils.ResponseHelpers._ def byId(names: Seq[String])(implicit client: HttpClient, ec: ExecutionContext): Future[Seq[LinkData]] = { rawSeq[Thing](Endpoints.byId(names.mkString(","))).map(_.only[LinkData]) } } class RedditLink(client: HttpClient, val data: LinkData)(ec: ExecutionContext) extends Link(client, data.id)(ec)
jjpatel/scaredy
src/main/scala/scaredy/api/Link.scala
Scala
mit
1,367
package io.coral.actors

import io.coral.actors.Messages._

import scala.collection.immutable.SortedMap
import akka.actor._
import scaldi.Injector

class RuntimeActor(implicit injector: Injector) extends Actor with ActorLogging {
  def actorRefFactory = context

  var actors = SortedMap.empty[Long, ActorPath]
  var count = 0L

  def receive = {
    case CreateActor(json) =>
      val props = CoralActorFactory.getProps(json)
      val actorId = props map { p =>
        count += 1
        val id = count
        val actor = actorRefFactory.actorOf(p, s"$id")
        actors += (id -> actor.path)
        id
      }
      sender ! actorId

    case RegisterActorPath(id, path) =>
      actors += (id -> path)

    case UnregisterActorId(id) =>
      actors -= id

    case GetCount() =>
      count += 1
      sender ! count

    case ListActors() =>
      sender ! actors.keys.toList

    case Delete(id: Long) =>
      actors.get(id).map { a =>
        actorRefFactory.actorSelection(a) ! PoisonPill
      }
      actors -= id

    case DeleteAllActors() =>
      // do not reset the counter since poisoning is asynchronous!
      actors.foreach { path =>
        actorRefFactory.actorSelection(path._2) ! PoisonPill
      }
      actors = SortedMap.empty[Long, ActorPath]
      log.info(context.children.size.toString)

    case GetActorPath(id) =>
      val path = actors.get(id)
      sender ! path
  }
}
daishichao/coral
runtime-api/src/main/scala/io/coral/actors/RuntimeActor.scala
Scala
apache-2.0
1,377
package provingground import HoTT._ import scalahott._ import org.scalatest._, flatspec._ import induction.implicits._ import provingground.library.{ DoubleEvenSym, LocalConstImpliesConstSym, SuccNOrNEvenSym } // import Fold._ import shapeless._ import NatRing._ import spire.math.SafeLong class InductionSpecSym extends flatspec.AnyFlatSpec { val n = NatTyp.sym val m = NatTyp.sym val recNN: Func[ScalaTerm[SafeLong], Func[Func[NatRing.Nat, Func[ScalaTerm[ SafeLong ], ScalaTerm[SafeLong]]], Func[NatRing.Nat, ScalaTerm[SafeLong]]]] = NatRing.rec(NatTyp) val recNNN: Func[Func[ScalaTerm[SafeLong], ScalaTerm[SafeLong]], Func[Func[ NatRing.Nat, Func[Func[ScalaTerm[SafeLong], ScalaTerm[SafeLong]], Func[ScalaTerm[ SafeLong ], ScalaTerm[SafeLong]]] ], Func[NatRing.Nat, Func[ScalaTerm[SafeLong], ScalaTerm[SafeLong]]]]] = NatRing.rec(NatTyp ->: NatTyp) "Ackermann function recursively defined symbolically" should "give the correct values" in { val ackm : Func[ScalaTerm[SafeLong], ScalaTerm[SafeLong]] = "ack(m)" :: NatTyp ->: NatTyp val ackmp1n: ScalaTerm[SafeLong] = "ack(m+1)(n)" :: NatTyp val ack: Func[NatRing.Nat, Func[ScalaTerm[SafeLong], ScalaTerm[SafeLong]]] = recNNN(succ)( m :-> (ackm :-> recNN(ackm(Literal(1)))( n :-> (ackmp1n :-> (ackm(ackmp1n))) )) ) assert(ack(Literal(2))(Literal(2)) == Literal(7)) assert(ack(Literal(3))(Literal(1)) == Literal(13)) } "Example Theorems Symbolically proved" should "have proofs with types the theorems" in { assert(DoubleEvenSym.pf.typ == DoubleEvenSym.thm) assert(SuccNOrNEvenSym.pf.typ == SuccNOrNEvenSym.thm) assert(LocalConstImpliesConstSym.pf.typ == LocalConstImpliesConstSym.thm) } }
siddhartha-gadgil/ProvingGround
mantle/src/test/scala/provingground/InductionSpecSym.scala
Scala
mit
1,785
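For reference, the Ackermann recursion that the symbolic recNNN definition above encodes, written directly over BigInt; it reproduces the asserted values ack(2)(2) == 7 and ack(3)(1) == 13:

object AckermannPlain {
  def ack(m: BigInt, n: BigInt): BigInt =
    if (m == 0) n + 1                  // base case
    else if (n == 0) ack(m - 1, 1)     // drop to the previous row
    else ack(m - 1, ack(m, n - 1))     // nested recursion

  def main(args: Array[String]): Unit = {
    assert(ack(2, 2) == 7)
    assert(ack(3, 1) == 13)
  }
}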
package com.betfair.domain import play.api.libs.json.{Writes, Reads} object PriceData extends Enumeration { type PriceData = Value val SP_AVAILABLE, SP_TRADED, EX_BEST_OFFERS, EX_ALL_OFFERS, EX_TRADED = Value implicit val enumReads: Reads[PriceData] = EnumUtils.enumReads(PriceData) implicit def enumWrites: Writes[PriceData] = EnumUtils.enumWrites }
city81/betfair-service-ng
src/main/scala/com/betfair/domain/PriceData.scala
Scala
bsd-2-clause
362
/* * Copyright (c) 2017 sadikovi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.github.sadikovi.riff.io import java.nio.ByteBuffer import com.github.sadikovi.testutil.UnitTestSuite class ByteBufferStreamSuite extends UnitTestSuite { test("stream on empty byte buffer") { val in = new ByteBufferStream(ByteBuffer.allocate(0)) in.available should be (0) in.read(new Array[Byte](4)) should be (0) // read of single byte when no bytes are left in.read() should be (-1) } test("stream on byte buffer") { val buf = ByteBuffer.wrap(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)) val in = new ByteBufferStream(buf) in.available should be (8) in.read(new Array[Byte](5)) should be (5) in.available should be (3) in.read() should be (6) in.available should be (2) // read more than remaining in.read(new Array[Byte](3)) should be (2) in.available should be (0) } test("stream - skip bytes more than buffer") { val buf = ByteBuffer.wrap(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)) val in = new ByteBufferStream(buf) in.skip(100) should be (8) in.available should be (0) } test("stream - skip bytes less than buffer") { val buf = ByteBuffer.wrap(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)) val in = new ByteBufferStream(buf) in.skip(6) should be (6) in.available should be (2) in.skip(6) should be (2) in.available should be (0) } }
sadikovi/riff
format/src/test/scala/com/github/sadikovi/riff/io/ByteBufferStreamSuite.scala
Scala
mit
2,465
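The ByteBufferStream class under test is not included in this record. A minimal InputStream-over-ByteBuffer sketch consistent with what the suite asserts; note the suite expects read(array) to return 0, not the conventional -1, when the buffer is exhausted:

import java.io.InputStream
import java.nio.ByteBuffer

class MiniByteBufferStream(buf: ByteBuffer) extends InputStream {
  override def available(): Int = buf.remaining()

  // single-byte read: -1 once the buffer is exhausted, as the suite asserts
  override def read(): Int =
    if (!buf.hasRemaining) -1 else buf.get() & 0xff

  // bulk read: reads at most the remaining bytes; returns 0 (not -1) when empty
  override def read(dst: Array[Byte], off: Int, len: Int): Int = {
    val n = math.min(len, buf.remaining())
    buf.get(dst, off, n)
    n
  }

  // skip is bounded by the bytes actually remaining
  override def skip(n: Long): Long = {
    val s = math.min(n, buf.remaining().toLong).toInt
    buf.position(buf.position() + s)
    s
  }
}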
import sbt._ object Dependencies { val _ScalaVersion_ = "2.11.7" val _SbtVersion_ = "0.13.8" //Versions private val _AkkaVersion_ = "2.4-M2" private val _AkkaStreamsVersion_ = "1.0" private val _JUnitVersion_ = "4.12" private val _PlayPluginsMailerVersion_ = "2.3.1" private val _ScalaJsDomVersion_ = "0.8.0" val _ScalaJsJQueryVersion_ = "0.8.0" val _ScalaRxVersion_ = "0.2.7" val _ScalaTagsVersion_ = "0.4.5" private val _ScalaTestVersion_ = "2.2.4" private val _ScalazVersion_ = "7.1.1" private val _SecureSocialVersion_ = "3.0-M3" private val _ShapelessVersion_ = "2.1.0" private val _Slf4jVersion_ = "1.7.10" val _UTestVersion_ = "0.3.0" val _Test_ = "test" //Libraries private[this] val _ComTypesafeAkka_ = "com.typesafe.akka" val _AkkaActor_ = _ComTypesafeAkka_ %% "akka-actor" % _AkkaVersion_ val _AkkaSlf4j_ = _ComTypesafeAkka_ %% "akka-slf4j" % _AkkaVersion_ val _AkkaTestKit_ = _ComTypesafeAkka_ %% "akka-testkit" % _AkkaVersion_ % _Test_ val _AkkaStreams_ = _ComTypesafeAkka_ %% "akka-stream-experimental" % _AkkaStreamsVersion_ val _ComLihaoyi_ = "com.lihaoyi" val _JUnit_ = "junit" % "junit" % _JUnitVersion_ val _PlayPluginsMailer_ = "com.typesafe.play.plugins" %% "play-plugins-mailer" % _PlayPluginsMailerVersion_ val _Scalatest_ = "org.scalatest" %% "scalatest" % _ScalaTestVersion_ % _Test_ val _SecureSocial_ = "ws.securesocial" %% "securesocial" % _SecureSocialVersion_ private[this] val _OrgScalaz_ = "org.scalaz" val _ScalazDeps_ = Seq( _OrgScalaz_ %% "scalaz-core" % _ScalazVersion_, _OrgScalaz_ %% "scalaz-effect" % _ScalazVersion_, _OrgScalaz_ %% "scalaz-typelevel" % _ScalazVersion_, _OrgScalaz_ %% "scalaz-scalacheck-binding" % _ScalazVersion_ % _Test_ ) val _Shapeless_ = "com.chuusai" %% "shapeless" % _ShapelessVersion_ val _Slf4japi_ = "org.slf4j" % "slf4j-api" % _Slf4jVersion_ val _Slf4jsimple_ = "org.slf4j" % "slf4j-simple" % _Slf4jVersion_ }
georgenicoll/monkey-barrel
old20150804/project/Dependencies.scala
Scala
gpl-2.0
2,030
package io.swagger.client.model import io.swagger.client.core.ApiModel import org.joda.time.DateTime case class VariableCategory ( /* id */ id: Option[Int], /* Name of the category */ name: String, /* Value for replacing null measurements */ fillingValue: Option[Float], /* Maximum recorded value of this category */ maximumAllowedValue: Option[Float], /* Minimum recorded value of this category */ minimumAllowedValue: Option[Float], /* How long the effect of a measurement in this variable lasts */ durationOfAction: Option[Int], /* How long it takes for a measurement in this variable to take effect */ onsetDelay: Option[Int], /* How to combine values of this variable (for instance, to see a summary of the values over a month) 0 for sum OR 1 for mean */ combinationOperation: Option[String], /* updated */ updated: Option[Int], /* A value of 1 indicates that this category is generally a cause in a causal relationship. An example of a causeOnly category would be a category such as Work which would generally not be influenced by the behaviour of the user */ causeOnly: Option[Boolean], /* Is category public */ public: Option[Int], /* outcome */ outcome: Option[Boolean], /* created_at */ createdAt: Option[DateTime], /* updated_at */ updatedAt: Option[DateTime], /* Image URL */ imageUrl: Option[String], /* ID of the default unit for the category */ defaultUnitId: Option[Int]) extends ApiModel
QuantiModo/QuantiModo-SDK-Akka-Scala
src/main/scala/io/swagger/client/model/VariableCategory.scala
Scala
gpl-2.0
1,478
/* sbt -- Simple Build Tool * Copyright 2010 Mark Harrah */ package sbt import complete.HistoryCommands import scala.annotation.tailrec import java.io.File import Path._ object BasicCommandStrings { val HelpCommand = "help" val Exit = "exit" val Quit = "quit" /** The command name to terminate the program.*/ val TerminateAction: String = Exit def helpBrief = (HelpCommand, "Displays this help message or prints detailed help on requested commands (run 'help <command>').") def helpDetailed = HelpCommand + """ Prints a help summary. """ + HelpCommand + """ <command> Prints detailed help for command <command>. """ + HelpCommand + """ <regular expression> Searches the help according to the provided regular expression. """ def HistoryHelpBrief = (HistoryCommands.Start -> "History command help. Lists and describes all history commands.") def historyHelp = Help(Nil, (HistoryHelpBrief +: HistoryCommands.descriptions).toMap, Set(HistoryCommands.Start)) def exitBrief = "Terminates the build." def ReadCommand = "<" def ReadFiles = " file1 file2 ..." def ReadDetailed = ReadCommand + ReadFiles + """ Reads the lines from the given files and inserts them as commands. All empty lines and lines that start with '#' are ignored. If a file does not exist or is not readable, this command fails. All the lines from all the files are read before any of the commands are executed. Thus, if any file is not readable, none of commands from any of the files (even the existing ones) will be run. You probably need to escape this command if entering it at your shell.""" def ApplyCommand = "apply" def ApplyDetailed = ApplyCommand + """ <module-name>* Transforms the current State by calling <module-name>.apply(currentState) for each listed module name. Here, currentState is of type sbt.State. """ def RebootCommand = "reboot" def RebootDetailed = RebootCommand + """ [full] This command is equivalent to exiting sbt, restarting, and running the remaining commands with the exception that the JVM is not shut down. If 'full' is specified, the boot directory (`~/.sbt/boot` by default) is deleted before restarting. This forces an update of sbt and Scala and is useful when working with development versions of sbt or Scala.""" def Multi = ";" def MultiBrief = (Multi + " <command> (" + Multi + " <command>)*", "Runs the provided semicolon-separated commands.") def MultiDetailed = Multi + " command1 " + Multi + """ command2 ... Runs the specified commands.""" def AppendCommand = "append" def AppendLastDetailed = AppendCommand + """ <command> Appends 'command' to list of commands to run. """ val AliasCommand = "alias" def AliasDetailed = AliasCommand + """ Prints a list of defined aliases. """ + AliasCommand + """ name Prints the alias defined for `name`. """ + AliasCommand + """ name=value Sets the alias `name` to `value`, replacing any existing alias with that name. Whenever `name` is entered, the corresponding `value` is run. If any argument is provided to `name`, it is appended as argument to `value`. """ + AliasCommand + """ name= Removes the alias for `name`.""" def Shell = "shell" def ShellDetailed = "Provides an interactive prompt from which commands can be run." def ClearOnFailure = "--" def OnFailure = "-" def OnFailureDetailed = OnFailure + """ command Registers 'command' to run when a command fails to complete normally. Only one failure command may be registered at a time, so this command replaces the previous command if there is one. 
The failure command resets when it runs once, so it must be added again if desired.""" def IfLast = "iflast" def IfLastCommon = "If there are no more commands after this one, 'command' is run." def IfLastDetailed = IfLast + """ <command> """ + IfLastCommon val ContinuousExecutePrefix = "~" def continuousDetail = "Executes the specified command whenever source files change." def continuousBriefHelp = (ContinuousExecutePrefix + " <command>", continuousDetail) }
jamesward/xsbt
main/command/BasicCommandStrings.scala
Scala
bsd-3-clause
4,048
package proxy.protectedproxy object TestProtectionProxy extends App { val owner: Owner = new Owner() var reportGenerator: ReportGeneratorProxy = new ReportGeneratorProtectionProxy(owner) owner.setReportGenerator(reportGenerator) val employee: Employee = new Employee() reportGenerator = new ReportGeneratorProtectionProxy(employee) employee.setReportGenerator(reportGenerator) println("For owner:") println(owner.generateDailyReport()) println("For employee:") println(employee.generateDailyReport()) }
BBK-PiJ-2015-67/sdp-portfolio
exercises/week11/src/main/scala/proxy/protectedproxy/TestProtectionProxy.scala
Scala
unlicense
528
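Owner, Employee and the report-generator classes driven by this test are not included in the record. A minimal protection-proxy sketch matching the test's API (all class bodies here are assumed, for illustration): the proxy checks who is asking before delegating to the real subject.

trait ReportGeneratorProxy {
  def generateReport(): String
}

// the real subject the proxy guards
class RealReportGenerator extends ReportGeneratorProxy {
  def generateReport(): String = "full daily sales figures"
}

trait Staff {
  private var reportGenerator: ReportGeneratorProxy = _
  def setReportGenerator(rg: ReportGeneratorProxy): Unit = reportGenerator = rg
  def generateDailyReport(): String = reportGenerator.generateReport()
}

class Owner extends Staff
class Employee extends Staff

// protection proxy: delegates only when the requester is authorized
class ReportGeneratorProtectionProxy(requester: Staff) extends ReportGeneratorProxy {
  private val real = new RealReportGenerator
  def generateReport(): String = requester match {
    case _: Owner => real.generateReport()
    case _        => "access denied: only the owner may view the daily report"
  }
}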
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.broadcast

import java.io._
import java.nio.ByteBuffer

import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import scala.util.Random

import org.apache.spark.{Logging, SparkConf, SparkEnv, SparkException}
import org.apache.spark.io.CompressionCodec
import org.apache.spark.serializer.Serializer
import org.apache.spark.storage.{BroadcastBlockId, StorageLevel}
import org.apache.spark.util.{ByteBufferInputStream, Utils}
import org.apache.spark.util.io.ByteArrayChunkOutputStream

/**
 * A BitTorrent-like implementation of [[org.apache.spark.broadcast.Broadcast]].
 *
 * The mechanism is as follows:
 *
 * The driver divides the serialized object into small chunks and
 * stores those chunks in the BlockManager of the driver.
 *
 * On each executor, the executor first attempts to fetch the object from its BlockManager. If
 * it does not exist, it then uses remote fetches to fetch the small chunks from the driver and/or
 * other executors if available. Once it gets the chunks, it puts the chunks in its own
 * BlockManager, ready for other executors to fetch from.
 *
 * This prevents the driver from being the bottleneck in sending out multiple copies of the
 * broadcast data (one per executor) as done by the [[org.apache.spark.broadcast.HttpBroadcast]].
 *
 * When initialized, TorrentBroadcast objects read SparkEnv.get.conf.
 *
 * @param obj object to broadcast
 * @param id A unique identifier for the broadcast variable.
 */
private[spark] class TorrentBroadcast[T: ClassTag](obj: T, id: Long)
  extends Broadcast[T](id) with Logging with Serializable {

  /**
   * Value of the broadcast object on executors. This is reconstructed by [[readBroadcastBlock]],
   * which builds this value by reading blocks from the driver and/or other executors.
   *
   * On the driver, if the value is required, it is read lazily from the block manager.
   */
  @transient private lazy val _value: T = readBroadcastBlock()

  /** The compression codec to use, or None if compression is disabled */
  @transient private var compressionCodec: Option[CompressionCodec] = _

  /** Size of each block. Default value is 4MB. This value is only read by the broadcaster. */
  @transient private var blockSize: Int = _

  private def setConf(conf: SparkConf) {
    compressionCodec = if (conf.getBoolean("spark.broadcast.compress", true)) {
      Some(CompressionCodec.createCodec(conf))
    } else {
      None
    }
    // Note: use getSizeAsKb (not bytes) to maintain compatibility if no units are provided
    blockSize = conf.getSizeAsKb("spark.broadcast.blockSize", "4m").toInt * 1024
  }
  setConf(SparkEnv.get.conf)

  private val broadcastId = BroadcastBlockId(id)

  /** Total number of blocks this broadcast variable contains.
*/ private val numBlocks: Int = writeBlocks(obj) override protected def getValue() = { _value } /** * Divide the object into multiple blocks and put those blocks in the block manager. * @param value the object to divide * @return number of blocks this broadcast variable is divided into */ private def writeBlocks(value: T): Int = { // Store a copy of the broadcast variable in the driver so that tasks run on the driver // do not create a duplicate copy of the broadcast variable's value. SparkEnv.get.blockManager.putSingle(broadcastId, value, StorageLevel.MEMORY_AND_DISK, tellMaster = false) val blocks = TorrentBroadcast.blockifyObject(value, blockSize, SparkEnv.get.serializer, compressionCodec) blocks.zipWithIndex.foreach { case (block, i) => SparkEnv.get.blockManager.putBytes( BroadcastBlockId(id, "piece" + i), block, StorageLevel.MEMORY_AND_DISK_SER, tellMaster = true) } blocks.length } /** Fetch torrent blocks from the driver and/or other executors. */ private def readBlocks(): Array[ByteBuffer] = { // Fetch chunks of data. Note that all these chunks are stored in the BlockManager and reported // to the driver, so other executors can pull these chunks from this executor as well. val blocks = new Array[ByteBuffer](numBlocks) val bm = SparkEnv.get.blockManager for (pid <- Random.shuffle(Seq.range(0, numBlocks))) { val pieceId = BroadcastBlockId(id, "piece" + pid) logDebug(s"Reading piece $pieceId of $broadcastId") // First try getLocalBytes because there is a chance that previous attempts to fetch the // broadcast blocks have already fetched some of the blocks. In that case, some blocks // would be available locally (on this executor). def getLocal: Option[ByteBuffer] = bm.getLocalBytes(pieceId) def getRemote: Option[ByteBuffer] = bm.getRemoteBytes(pieceId).map { block => // If we found the block from remote executors/driver's BlockManager, put the block // in this executor's BlockManager. SparkEnv.get.blockManager.putBytes( pieceId, block, StorageLevel.MEMORY_AND_DISK_SER, tellMaster = true) block } val block: ByteBuffer = getLocal.orElse(getRemote).getOrElse( throw new SparkException(s"Failed to get $pieceId of $broadcastId")) blocks(pid) = block } blocks } /** * Remove all persisted state associated with this Torrent broadcast on the executors. */ override protected def doUnpersist(blocking: Boolean) { TorrentBroadcast.unpersist(id, removeFromDriver = false, blocking) } /** * Remove all persisted state associated with this Torrent broadcast on the executors * and driver. */ override protected def doDestroy(blocking: Boolean) { TorrentBroadcast.unpersist(id, removeFromDriver = true, blocking) } /** Used by the JVM when serializing this object. */ private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException { assertValid() out.defaultWriteObject() } private def readBroadcastBlock(): T = Utils.tryOrIOException { TorrentBroadcast.synchronized { setConf(SparkEnv.get.conf) SparkEnv.get.blockManager.getLocal(broadcastId).map(_.data.next()) match { case Some(x) => x.asInstanceOf[T] case None => logInfo("Started reading broadcast variable " + id) val startTimeMs = System.currentTimeMillis() val blocks = readBlocks() logInfo("Reading broadcast variable " + id + " took" + Utils.getUsedTimeMs(startTimeMs)) val obj = TorrentBroadcast.unBlockifyObject[T]( blocks, SparkEnv.get.serializer, compressionCodec) // Store the merged copy in BlockManager so other tasks on this executor don't // need to re-fetch it. 
SparkEnv.get.blockManager.putSingle( broadcastId, obj, StorageLevel.MEMORY_AND_DISK, tellMaster = false) obj } } } } private object TorrentBroadcast extends Logging { def blockifyObject[T: ClassTag]( obj: T, blockSize: Int, serializer: Serializer, compressionCodec: Option[CompressionCodec]): Array[ByteBuffer] = { val bos = new ByteArrayChunkOutputStream(blockSize) val out: OutputStream = compressionCodec.map(c => c.compressedOutputStream(bos)).getOrElse(bos) val ser = serializer.newInstance() val serOut = ser.serializeStream(out) serOut.writeObject[T](obj).close() bos.toArrays.map(ByteBuffer.wrap) } def unBlockifyObject[T: ClassTag]( blocks: Array[ByteBuffer], serializer: Serializer, compressionCodec: Option[CompressionCodec]): T = { require(blocks.nonEmpty, "Cannot unblockify an empty array of blocks") val is = new SequenceInputStream( blocks.iterator.map(new ByteBufferInputStream(_)).asJavaEnumeration) val in: InputStream = compressionCodec.map(c => c.compressedInputStream(is)).getOrElse(is) val ser = serializer.newInstance() val serIn = ser.deserializeStream(in) val obj = serIn.readObject[T]() serIn.close() obj } /** * Remove all persisted blocks associated with this torrent broadcast on the executors. * If removeFromDriver is true, also remove these persisted blocks on the driver. */ def unpersist(id: Long, removeFromDriver: Boolean, blocking: Boolean): Unit = { logDebug(s"Unpersisting TorrentBroadcast $id") SparkEnv.get.blockManager.master.removeBroadcast(id, removeFromDriver, blocking) } }
chenc10/Spark-PAF
core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
Scala
apache-2.0
9,220
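blockifyObject above serializes a value and slices the bytes into fixed-size pieces so executors can fetch and re-serve them torrent-style; unBlockifyObject reverses the process. Stripped of serialization and compression, the slicing step reduces to plain fixed-size chunking:

import java.nio.ByteBuffer

object Chunker {
  // split a byte array into blockSize-sized pieces (the last piece may be shorter)
  def chunk(bytes: Array[Byte], blockSize: Int): Array[ByteBuffer] =
    bytes.grouped(blockSize).map(a => ByteBuffer.wrap(a)).toArray

  // reassemble the pieces in order; duplicate() leaves each buffer's position intact
  def unchunk(blocks: Array[ByteBuffer]): Array[Byte] =
    blocks.flatMap { b =>
      val a = new Array[Byte](b.remaining())
      b.duplicate().get(a)
      a
    }
}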
package com.localytics.sbt.s3 import java.io.File import java.net.URL import java.util.zip.ZipFile import sbt.Keys.TaskStreams import scala.sys.process._ import scala.util.Try object DownloadS3Proxy { private[s3] def validJar(file: File): Boolean = Try(new ZipFile(file)).isSuccess def apply(ver: String, url: String, dir: File, file: String, streamz: TaskStreams): File = { val outputFile = new File(dir, file) if (!dir.exists()) { streamz.log.info(s"Creating S3Proxy directory $dir") dir.mkdirs() } if (!outputFile.exists()) { streamz.log.info(s"Downloading S3Proxy from [$url] to [${outputFile.getAbsolutePath}]") (new URL(url) #> outputFile).!! } if (!validJar(outputFile)) sys.error(s"Invalid jar file at [${outputFile.getAbsolutePath}]") outputFile } }
localytics/sbt-s3
src/main/scala/com/localytics/sbt/s3/DownloadS3Proxy.scala
Scala
mit
823
import api.{WebsiteApiConfig, WebsiteApi} import models.WebsiteDb import play.Play import play.api._ object Global extends GlobalSettings { override def onStart(app: Application): Unit = { val config = new play.Configuration(app.configuration) val websiteApiConfig : WebsiteApiConfig = WebsiteApiConfig(config) if (app.configuration.getBoolean("nginx.autostartstop").getOrElse( false)) { if (app.mode == Mode.Test) { Logger.info("starting in test mode") } else { val websiteDb = new WebsiteDb(websiteApiConfig.configDb, websiteApiConfig.adminPort, websiteApiConfig.wwwPath) WebsiteApi.start(websiteApiConfig, websiteDb) } Logger.info(s"nginx started on port ${websiteApiConfig.adminPort}") } else { Logger.info("not starting nginx as nginx.autostartstop is deactivated") } } override def onStop(app: Application): Unit = { if (app.configuration.getBoolean("nginx.autostartstop").getOrElse( false)) { if (app.mode == Mode.Test) { Logger.info("stopping in test mode") } else { WebsiteApi.stop } } else { Logger.info("not stopping nginx as nginx.autostartstop is deactivated") } } }
jlcanela/fastcms
app/Global.scala
Scala
apache-2.0
1,228
package uk.ac.ncl.openlab.intake24.sql.tools import java.io.BufferedReader import java.io.InputStreamReader trait WarningMessage { def displayWarningMessage(message: String) = { val len = message.length() val bar = Seq.fill(len)('=').mkString println(bar) println(message) println(bar) println() println("Are you sure you wish to continue? Type 'yes' to proceed, 'no' or Control+C to abort.") var proceed = false; val reader = new BufferedReader(new InputStreamReader(System.in)) while (!proceed) { val input = reader.readLine() if (input == "yes") proceed = true; if (input == "no") System.exit(0); } reader.close() } } object WarningMessage extends WarningMessage
digitalinteraction/intake24
DatabaseTools/src/main/scala/uk/ac/ncl/openlab/intake24/sql/tools/WarningMessage.scala
Scala
apache-2.0
765
/* * Copyright 2014 - 2015 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package slamdata.engine.javascript import slamdata.Predef._ import scalaz._ import Scalaz._ import slamdata.engine.{RenderTree, Terminal} import slamdata.engine.fp._ import slamdata.engine.analysis.fixplate._ /** ADT for a simplified, composable, core language for JavaScript. Provides only expressions, including lets. */ sealed trait JsCore[+A] object JsCore { sealed trait Operator { val js: String } abstract sealed class BinaryOperator(val js: String) extends Operator final case object Add extends BinaryOperator("+") final case object BitAnd extends BinaryOperator("&") final case object BitLShift extends BinaryOperator("<<") final case object BitNot extends BinaryOperator("~") final case object BitOr extends BinaryOperator("|") final case object BitRShift extends BinaryOperator(">>") final case object BitXor extends BinaryOperator("^") final case object Lt extends BinaryOperator("<") final case object Lte extends BinaryOperator("<=") final case object Gt extends BinaryOperator(">") final case object Gte extends BinaryOperator(">=") final case object Eq extends BinaryOperator("===") final case object Neq extends BinaryOperator("!==") final case object Div extends BinaryOperator("/") final case object In extends BinaryOperator("in") final case object And extends BinaryOperator("&&") final case object Or extends BinaryOperator("||") final case object Mod extends BinaryOperator("%") final case object Mult extends BinaryOperator("*") final case object Sub extends BinaryOperator("-") final case object Instance extends BinaryOperator("instanceof") abstract sealed class UnaryOperator(val js: String) extends Operator final case object Neg extends UnaryOperator("-") final case object Not extends UnaryOperator("!") final case class Literal(value: Js.Lit) extends JsCore[Nothing] final case class Ident(name: String) extends JsCore[Nothing] final case class Access[A](expr: A, key: A) extends JsCore[A] final case class Call[A](callee: A, args: List[A]) extends JsCore[A] final case class New[A](name: String, args: List[A]) extends JsCore[A] final case class If[A](condition: A, consequent: A, alternative: A) extends JsCore[A] final case class UnOp[A](op: UnaryOperator, arg: A) extends JsCore[A] final case class BinOp[A](op: BinaryOperator, left: A, right: A) extends JsCore[A] object BinOp { def apply[A](op: BinaryOperator, a1: Term[JsCore], a2: Term[JsCore], a3: Term[JsCore], args: Term[JsCore]*): Term[JsCore] = args.toList match { case Nil => BinOp(op, a1, BinOp(op, a2, a3).fix).fix case h :: t => BinOp(op, a1, BinOp(op, a2, a3, h, t: _*)).fix } } // TODO: Cond // TODO: Fn? final case class Arr[A](values: List[A]) extends JsCore[A] final case class Fun[A](params: List[String], body: A) extends JsCore[A] // NB: at runtime, JS may not preserve the order of fields, but using // ListMap here lets us be explicit about what result we'd like to see. 
final case class Obj[A](values: ListMap[String, A]) extends JsCore[A] final case class Let[A](name: Ident, expr: A, body: A) extends JsCore[A] final case class SpliceObjects[A](srcs: List[A]) extends JsCore[A] final case class SpliceArrays[A](srcs: List[A]) extends JsCore[A] def Select(expr: Term[JsCore], name: String): Access[Term[JsCore]] = Access(expr, Literal(Js.Str(name)).fix) private[javascript] def toUnsafeJs(expr: Term[JsCore]): Js.Expr = expr.simplify.unFix match { case Literal(value) => value case Ident(name) => Js.Ident(name) case Access(expr, key) => smartDeref(toUnsafeJs(expr), toUnsafeJs(key)) case Call(callee, args) => Js.Call(toUnsafeJs(callee), args.map(toUnsafeJs)) case New(name, args) => Js.New(Js.Call(Js.Ident(name), args.map(toUnsafeJs(_)))) case If(cond, cons, alt) => Js.Ternary(toUnsafeJs(cond), toUnsafeJs(cons), toUnsafeJs(alt)) case UnOp(op, arg) => Js.UnOp(op.js, toUnsafeJs(arg)) case BinOp(op, left, right) => Js.BinOp(op.js, toUnsafeJs(left), toUnsafeJs(right)) case Arr(values) => Js.AnonElem(values.map(toUnsafeJs(_))) case Fun(params, body) => Js.AnonFunDecl(params, List(Js.Return(toUnsafeJs(body)))) case Obj(values) => Js.AnonObjDecl(values.toList.map { case (k, v) => k -> toUnsafeJs(v) }) case Let(name, expr, body) => Js.Let(ListMap(name.name -> toUnsafeJs(expr)), Nil, toUnsafeJs(body)) case SpliceObjects(_) => expr.toJs case SpliceArrays(_) => expr.toJs } val findFunctionsƒ: JsCore[(Term[JsCore], Set[String])] => Set[String] = { case Call((Term(Ident(name)), _), args) => Foldable[List].fold(args.map(_._2)) + name case js => js.map(_._2).fold } def copyAllFields(src: Term[JsCore], dst: Term[JsCore]): Js.Stmt = { val tmp = Js.Ident("__attr") // TODO: use properly-generated temp name (see #581) Js.ForIn(tmp, src.toJs, Js.If( Js.Call(Js.Select(src.toJs, "hasOwnProperty"), List(tmp)), Js.BinOp("=", Js.Access(dst.toJs, tmp), Js.Access(src.toJs, tmp)), None)) } private def whenDefined(expr: Term[JsCore], body: Js.Expr => Js.Expr, default: => Js.Expr): Js.Expr = { expr.simplify.unFix match { case Literal(Js.Null) => default case Literal(_) => body(expr.toJs) case Arr(_) => body(expr.toJs) case Fun(_, _) => body(expr.toJs) case Obj(_) => body(expr.toJs) case Access(x, y) => val bod = body(toUnsafeJs(expr)) val test = Js.BinOp("&&", Js.BinOp("!=", toUnsafeJs(x), Js.Null), Js.BinOp("!=", toUnsafeJs(expr), Js.Null)) Js.Ternary(test, bod, default) case _ => // NB: expr is duplicated here, which generates redundant code if expr is // a function call, for example. See #581. 
val bod = body(toUnsafeJs(expr)) val test = Js.BinOp("!=", expr.toJs, Js.Null) bod match { case Js.Ternary(cond, cons, default0) if default0 == default => Js.Ternary(Js.BinOp("&&", test, cond), cons, default) case _ => Js.Ternary(test, bod, default) } } } private def smartDeref(expr: Js.Expr, key: Js.Expr): Js.Expr = key match { case Js.Str(name @ Js.SimpleNamePattern()) => Js.Select(expr, name) case _ => Js.Access(expr, key) } // TODO: Remove this once we have actually functionalized everything def safeAssign(lhs: Term[JsCore], rhs: => Term[JsCore]): Js.Expr = lhs.simplify.unFix match { case Access(obj, key) => whenDefined(obj, obj => Js.BinOp("=", smartDeref(obj, key.toJs), rhs.toJs), Js.Undefined) case _ => Js.BinOp("=", lhs.toJs, rhs.toJs) } // Check the RHS, but assume the LHS is known to be defined: def unsafeAssign(lhs: Term[JsCore], rhs: => Term[JsCore]): Js.Expr = Js.BinOp("=", toUnsafeJs(lhs), rhs.toJs) implicit val JsCoreTraverse: Traverse[JsCore] = new Traverse[JsCore] { def traverseImpl[G[_], A, B](fa: JsCore[A])(f: A => G[B])(implicit G: Applicative[G]): G[JsCore[B]] = { fa match { case x @ Literal(_) => G.point(x) case x @ Ident(_) => G.point(x) case Access(expr, key) => G.apply2(f(expr), f(key))(Access(_, _)) case Call(expr, args) => G.apply2(f(expr), args.map(f).sequence)(Call(_, _)) case New(name, args) => G.map(args.map(f).sequence)(New(name, _)) case If(cond, cons, alt) => G.apply3(f(cond), f(cons), f(alt))(If(_, _, _)) case UnOp(op, arg) => G.map(f(arg))(UnOp(op, _)) case BinOp(op, left, right) => G.apply2(f(left), f(right))(BinOp(op, _, _)) case Arr(values) => G.map(values.map(f).sequence)(Arr(_)) case Fun(params, body) => G.map(f(body))(Fun(params, _)) case Obj(values) => G.map((values ∘ f).sequence)(Obj(_)) case Let(name, expr, body) => G.apply2(f(expr), f(body))(Let(name, _, _)) case SpliceObjects(srcs) => G.map(srcs.map(f).sequence)(SpliceObjects(_)) case SpliceArrays(srcs) => G.map(srcs.map(f).sequence)(SpliceArrays(_)) } } } implicit class UnFixedJsCoreOps(expr: JsCore[Term[JsCore]]) { def fix = Term[JsCore](expr) } implicit class JsCoreOps(expr: Term[JsCore]) { def toJs: Js.Expr = expr.simplify.unFix match { case Literal(value) => value case Ident(name) => Js.Ident(name) case Access(expr, key) => whenDefined( expr, smartDeref(_, key.toJs), Js.Undefined) case Call(Term(Access(Term(New(name, args1)), Term(Literal(Js.Str(mName))))), args2) => // NB: if we are explicitly constructing a value, we presumably know its fields, // so no need to check them, but the args may still come from an unreliable source. Js.Call(Js.Select(Js.New(Js.Call(Js.Ident(name), args1.map(_.toJs))), mName), args2.map(_.toJs)) case Call(Term(Access(arr @ Term(Arr(_)), Term(Literal(Js.Str(mName))))), args) => // NB: if we are explicitly constructing a value, we presumably know its fields, // so no need to check them. Js.Call(Js.Select(arr.toJs, mName), args.map(_.toJs)) case Call(expr @ Term(Access(_, _)), args) => // NB: check any other access and the callee together. 
whenDefined(expr, Js.Call(_, args.map(_.toJs)), Js.Undefined) case Call(callee, args) => Js.Call(callee.toJs, args.map(_.toJs)) case New(name, args) => Js.New(Js.Call(Js.Ident(name), args.map(_.toJs))) case If(cond, cons, alt) => Js.Ternary(cond.toJs, cons.toJs, alt.toJs) case UnOp(op, arg) => whenDefined(arg, Js.UnOp(op.js, _), Js.Null) case BinOp(op, left, right) => whenDefined( left, l => whenDefined(right, r => Js.BinOp(op.js, l, r), Js.Null), Js.Null) case Arr(values) => Js.AnonElem(values.map(_.toJs)) case Fun(params, body) => Js.AnonFunDecl(params, List(Js.Return(body.toJs))) case Obj(values) => Js.AnonObjDecl(values.toList.map { case (k, v) => k -> v.toJs }) case Let(name, expr, body) => Js.Let(ListMap(name.name -> expr.toJs), Nil, body.toJs) case s @ SpliceObjects(srcs) => val tmp = Ident("__rez") // TODO: use properly-generated temp name (see #581) Js.Let( Map(tmp.name -> Js.AnonObjDecl(Nil)), srcs.flatMap { case Term(Obj(values)) => values.map { case (k, v) => Js.BinOp("=", smartDeref(tmp.fix.toJs, Js.Str(k)), v.toJs) } case src => copyAllFields(src, tmp.fix) :: Nil }, tmp.fix.toJs) case s @ SpliceArrays(srcs) => val tmp = Ident("__rez") // TODO: use properly-generated temp name (see #581) val elem = Ident("__elem") // TODO: use properly-generated temp name (see #581) Js.Let( Map(tmp.name -> Js.AnonElem(Nil)), srcs.flatMap { case Term(Arr(values)) => values.map(v => Js.Call(Js.Select(tmp.fix.toJs, "push"), List(v.toJs))) case src => List( Js.ForIn(Js.Ident(elem.name), src.toJs, Js.If( Js.Call(Js.Select(src.toJs, "hasOwnProperty"), List(elem.fix.toJs)), Js.Call(Js.Select(tmp.fix.toJs, "push"), List(Js.Access(src.toJs, elem.fix.toJs))), None))) }, tmp.fix.toJs) } def simplify: Term[JsCore] = { expr.rewrite(_.unFix match { case Access(Term(Obj(values)), Term(Literal(Js.Str(name)))) => values.get(name) case If(cond0, Term(If(cond1, cons, alt1)), alt0) if alt0 == alt1 => Some(If(BinOp(And, cond0, cond1).fix, cons, alt0).fix) // NB: inline simple names and selects (e.g. 
`x`, `x.y`, and `x.y.z`) case Let(name, expr @ Term(Ident(_)), body) => Some(body.substitute(name.fix, expr)) case Let(name, expr @ Term(Access(Term(Ident(_)), Term(Literal(Js.Str(_))))), body) => Some(body.substitute(name.fix, expr)) case Let(name, expr @ Term(Access(Term(Access(Term(Ident(_)), Term(Literal(Js.Str(_))))), Term(Literal(Js.Str(_))))), body) => Some(body.substitute(name.fix, expr)) // NB: inline object constructors where the body only extracts one field case Let(bound, Term(Obj(values)), Term(Access(Term(name), Term(Literal(Js.Str(key)))))) if bound == name => values.get(key) case x => None }) } def substitute(oldExpr: Term[JsCore], newExpr: Term[JsCore]): Term[JsCore] = { def loop(x: Term[JsCore], inScope: Set[Term[JsCore]]): Term[JsCore] = if (x == oldExpr && !(inScope contains x)) newExpr else x.unFix match { case Let(name, expr, body) => Let(name, loop(expr, inScope), loop(body, inScope + name.fix)).fix case Fun(params, body) => Fun(params, loop(body, inScope ++ params.map(Ident(_).fix).toSet)).fix case Access(expr, key) => Access(loop(expr, inScope), loop(key, inScope)).fix case Arr(values) => Arr(values.map(loop(_, inScope))).fix case BinOp(op, l, r) => BinOp(op, loop(l, inScope), loop(r, inScope)).fix case Call(callee, args) => Call(loop(callee, inScope), args.map(loop(_, inScope))).fix case id @ Ident(_) => id.fix case If(cond, cons, alt) => If(loop(cond, inScope), loop(cons, inScope), loop(alt, inScope)).fix case lit @ Literal(_) => lit.fix case New(name, args) => New(name, args.map(loop(_, inScope))).fix case Obj(values) => Obj(values ∘ (x => loop(x, inScope))).fix case SpliceArrays(srcs) => SpliceArrays(srcs.map(loop(_, inScope))).fix case SpliceObjects(srcs) => SpliceObjects(srcs.map(loop(_, inScope))).fix case UnOp(op, x) => UnOp(op, loop(x, inScope)).fix } loop(expr, Set.empty) } } import slamdata.engine.physical.mongodb.{Bson} def unapply(value: Bson): Option[Term[JsCore]] = value match { case Bson.Null => Some(JsCore.Literal(Js.Null).fix) case Bson.Text(str) => Some(JsCore.Literal(Js.Str(str)).fix) case Bson.Bool(value) => Some(JsCore.Literal(Js.Bool(value)).fix) case Bson.Int32(value) => Some(JsCore.Literal(Js.Num(value, false)).fix) case Bson.Int64(value) => Some(JsCore.Literal(Js.Num(value, false)).fix) case Bson.Dec(value) => Some(JsCore.Literal(Js.Num(value, true)).fix) case Bson.Doc(value) => value.map { case (name, bson) => JsCore.unapply(bson).map(name -> _) }.toList.sequenceU.map(pairs => JsCore.Obj(pairs.toListMap).fix) case _ => None } } final case class JsFn(base: JsCore.Ident, expr: Term[JsCore]) { def apply(x: Term[JsCore]) = expr.substitute(base.fix, x) def >>>(that: JsFn): JsFn = if (this == JsFn.identity) that else if (that == JsFn.identity) this else JsFn(this.base, JsCore.Let(that.base, this.expr, that.expr).fix.simplify) override def toString = JsCore.toUnsafeJs(apply(JsCore.Ident("_").fix).simplify).render(0) val commonBase = JsCore.Ident("$") override def equals(obj: scala.Any) = obj match { case that @ JsFn(_, _) => apply(commonBase.fix).simplify == that.apply(commonBase.fix).simplify case _ => false } override def hashCode = apply(commonBase.fix).simplify.hashCode } object JsFn { val base = JsCore.Ident("__val") val identity = { JsFn(base, base.fix) } def const(x: Term[JsCore]) = JsFn(JsCore.Ident("__unused"), x) implicit val JsFnRenderTree = RenderTree.fromToString[JsFn]("JsFn") }
wemrysi/quasar
core/src/main/scala/slamdata/engine/javascript/jscore.scala
Scala
apache-2.0
16,570
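A quick, hedged sketch of how the ADT and JsFn above compose (the record and field names are invented; the helpers all come from the file itself):

import slamdata.engine.javascript._
import JsCore._

// p => p.profile, rendered null-safely by toJs rather than as raw JS text
val p = Ident("p")
val profile = JsFn(p, Select(p.fix, "profile").fix)

// a => a.age >= 18
val a = Ident("a")
val adult = JsFn(a, BinOp(Gte, Select(a.fix, "age").fix, Literal(Js.Num(18, false)).fix).fix)

// >>> pipes left to right and simplify inlines the intermediate Let,
// so this denotes rec => rec.profile.age >= 18
val check = profile >>> adult
val js: Js.Expr = check(Ident("rec").fix).toJs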
package lectures package algorithms import scala.annotation.tailrec sealed trait Conc[@specialized(Int, Long, Float, Double) +T] { def level: Int def size: Int def left: Conc[T] def right: Conc[T] def normalized = this } object Conc { case class <>[+T](left: Conc[T], right: Conc[T]) extends Conc[T] { val level = 1 + math.max(left.level, right.level) val size = left.size + right.size } sealed trait Leaf[T] extends Conc[T] { def left = sys.error("Leaves do not have children.") def right = sys.error("Leaves do not have children.") } case object Empty extends Leaf[Nothing] { def level = 0 def size = 0 } class Single[@specialized(Int, Long, Float, Double) T](val x: T) extends Leaf[T] { def level = 0 def size = 1 override def toString = s"Single($x)" } class Chunk[@specialized(Int, Long, Float, Double) T](val array: Array[T], val size: Int, val k: Int) extends Leaf[T] { def level = 0 override def toString = s"Chunk(${array.mkString("", ", ", "")}; $size; $k)" } case class Append[+T](left: Conc[T], right: Conc[T]) extends Conc[T] { val level = 1 + math.max(left.level, right.level) val size = left.size + right.size override def normalized = { def wrap[T](xs: Conc[T], ys: Conc[T]): Conc[T] = (xs: @unchecked) match { case Append(ws, zs) => wrap(ws, zs <> ys) case xs => xs <> ys } wrap(left, right) } } def concatTop[T](xs: Conc[T], ys: Conc[T]) = { if (xs == Empty) ys else if (ys == Empty) xs else concat(xs, ys) } private def concat[T](xs: Conc[T], ys: Conc[T]): Conc[T] = { val diff = ys.level - xs.level if (diff >= -1 && diff <= 1) new <>(xs, ys) else if (diff < -1) { if (xs.left.level >= xs.right.level) { val nr = concat(xs.right, ys) new <>(xs.left, nr) } else { val nrr = concat(xs.right.right, ys) if (nrr.level == xs.level - 3) { val nl = xs.left val nr = new <>(xs.right.left, nrr) new <>(nl, nr) } else { val nl = new <>(xs.left, xs.right.left) val nr = nrr new <>(nl, nr) } } } else { if (ys.right.level >= ys.left.level) { val nl = concat(xs, ys.left) new <>(nl, ys.right) } else { val nll = concat(xs, ys.left.left) if (nll.level == ys.level - 3) { val nl = new <>(nll, ys.left.right) val nr = ys.right new <>(nl, nr) } else { val nl = nll val nr = new <>(ys.left.right, ys.right) new <>(nl, nr) } } } } def appendTop[T](xs: Conc[T], ys: Leaf[T]): Conc[T] = (xs: @unchecked) match { case xs: Append[T] => append(xs, ys) case _ <> _ => new Append(xs, ys) case Empty => ys case xs: Leaf[T] => new <>(xs, ys) } @tailrec private def append[T](xs: Append[T], ys: Conc[T]): Conc[T] = { if (xs.right.level > ys.level) new Append(xs, ys) else { val zs = new <>(xs.right, ys) xs.left match { case ws @ Append(_, _) => append(ws, zs) case ws if ws.level <= zs.level => ws <> zs case ws => new Append(ws, zs) } } } def traverse[@specialized(Int, Long, Float, Double) T, @specialized(Int, Long, Float, Double) U](xs: Conc[T], f: T => U): Unit = (xs: @unchecked) match { case left <> right => traverse(left, f) traverse(right, f) case s: Single[T] => f(s.x) case c: Chunk[T] => val a = c.array val sz = c.size var i = 0 while (i < sz) { f(a(i)) i += 1 } case Empty => case Append(left, right) => traverse(left, f) traverse(right, f) case _ => sys.error("All cases should have been covered: " + xs + ", " + xs.getClass) } }
twistedgut/scala_coursera
parprog-snippets/src/main/scala/lectures/algorithms/Conc.scala
Scala
gpl-3.0
3,862
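A hedged usage sketch for the conc-tree above (element values arbitrary): appendTop grows an Append spine in amortized constant time, normalized rebalances it into <> nodes, and traverse visits elements left to right.

import lectures.algorithms._
import Conc._

var xs: Conc[Int] = Empty
for (i <- 1 to 10) xs = appendTop(xs, new Single(i))

val balanced = xs.normalized                 // collapse the Append spine into <> nodes
val doubled  = concatTop(balanced, balanced) // O(log n) concatenation

val buf = scala.collection.mutable.ArrayBuffer.empty[Int]
traverse(doubled, (x: Int) => buf += x)      // buf now holds 1..10 twice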
/* Copyright 2014 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.scalding.typed import com.twitter.algebird.{ CMS, CMSHasher } object Sketched { // TODO: there are more efficient orderings we could use here if this turns // out to be a bottleneck, and this should actually never end up getting used. // We may be able to remove this after some refactoring in Algebird. implicit val byteArrayOrdering = Ordering.by((_: Array[Byte]).toIterable) /** * This is based on the CMSHasherBigInt found in algebird (see docs for in depth explanation): * https://github.com/twitter/algebird/blob/develop/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala#L1086 * * TODO: We need to move this hasher to CMSHasherImplicits in algebird: * https://github.com/twitter/algebird/blob/develop/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala#L1054 * See: https://github.com/twitter/scalding/issues/1177 */ implicit object CMSHasherByteArray extends CMSHasher[Array[Byte]] { override def hash(a: Int, b: Int, width: Int)(x: Array[Byte]): Int = { val hash: Int = scala.util.hashing.MurmurHash3.arrayHash(x, a) // We only want positive integers for the subsequent modulo. This method mimics Java's Hashtable // implementation. The Java code uses `0x7FFFFFFF` for the bit-wise AND, which is equal to Int.MaxValue. val positiveHash = hash & Int.MaxValue positiveHash % width } } } /** * This class is generally only created by users * with the TypedPipe.sketch method */ case class Sketched[K, V](pipe: TypedPipe[(K, V)], numReducers: Int, delta: Double, eps: Double, seed: Int)(implicit serialization: K => Array[Byte], ordering: Ordering[K]) extends MustHaveReducers { import Sketched._ def serialize(k: K): Array[Byte] = serialization(k) def reducers = Some(numReducers) private lazy implicit val cms = CMS.monoid[Array[Byte]](eps, delta, seed) lazy val sketch: TypedPipe[CMS[Array[Byte]]] = pipe .map { case (k, v) => cms.create(serialization(k)) } .groupAll .sum .values .forceToDisk /** * Like a hashJoin, this joiner does not see all the values V at one time, only one at a time. 
* This is sufficient to implement join and leftJoin */ def cogroup[V2, R](right: TypedPipe[(K, V2)])(joiner: (K, V, Iterable[V2]) => Iterator[R]): SketchJoined[K, V, V2, R] = new SketchJoined(this, right, numReducers)(joiner) /** * Does a logical inner join but replicates the heavy keys of the left hand side * across the reducers */ def join[V2](right: TypedPipe[(K, V2)]) = cogroup(right)(Joiner.hashInner2) /** * Does a logical left join but replicates the heavy keys of the left hand side * across the reducers */ def leftJoin[V2](right: TypedPipe[(K, V2)]) = cogroup(right)(Joiner.hashLeft2) } case class SketchJoined[K: Ordering, V, V2, R](left: Sketched[K, V], right: TypedPipe[(K, V2)], numReducers: Int)(joiner: (K, V, Iterable[V2]) => Iterator[R]) extends MustHaveReducers { def reducers = Some(numReducers) //the maximum fraction of any one reducer's input that a single key should be allowed to occupy private val maxReducerFraction = 0.1 private def flatMapWithReplicas[W](pipe: TypedPipe[(K, W)])(fn: Int => Iterable[Int]) = pipe.cross(left.sketch).flatMap{ case (v, cms) => val maxPerReducer = (cms.totalCount / numReducers) * maxReducerFraction + 1 val maxReplicas = (cms.frequency(left.serialize(v._1)).estimate.toDouble / maxPerReducer) //if the frequency is 0, maxReplicas.ceil will be 0 so we will filter out this key entirely //if it's < maxPerReducer, the ceil will round maxReplicas up to 1 to ensure we still see it val replicas = fn(maxReplicas.ceil.toInt.min(numReducers)) replicas.map{ i => (i, v._1) -> v._2 } } lazy val toTypedPipe: TypedPipe[(K, R)] = { lazy val rand = new scala.util.Random(left.seed) val lhs = flatMapWithReplicas(left.pipe){ n => Some(rand.nextInt(n) + 1) } val rhs = flatMapWithReplicas(right){ n => 1.to(n) } lhs .group .cogroup(rhs.group){ (k, itv, itu) => itv.flatMap{ v => joiner(k._2, v, itu) } } .withReducers(numReducers) .map{ case ((r, k), v) => (k, v) } } } object SketchJoined { implicit def toTypedPipe[K, V, V2, R](joined: SketchJoined[K, V, V2, R]): TypedPipe[(K, R)] = joined.toTypedPipe }
MansurAshraf/scalding
scalding-core/src/main/scala/com/twitter/scalding/typed/Sketched.scala
Scala
apache-2.0
4,970
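A worked example of the replication bound computed inside flatMapWithReplicas above, with all counts invented; it shows how a hot key is capped at numReducers copies.

val numReducers        = 10
val maxReducerFraction = 0.1
val totalCount         = 1000000L  // stands in for cms.totalCount
val keyFrequency       = 90000.0   // stands in for cms.frequency(serializedKey).estimate

val maxPerReducer = (totalCount / numReducers) * maxReducerFraction + 1  // 10001.0
val maxReplicas   = keyFrequency / maxPerReducer                         // ~9.0
val replicas      = math.ceil(maxReplicas).toInt.min(numReducers)        // 9 copies of this key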
package org.oneugene.log.play import java.time.Month import org.oneugene.log.model.{BDate, User} import org.openjdk.jmh.annotations.{Scope, State} @State(Scope.Benchmark) class TestConstants { var originalUser = User("Ievgenii", BDate(1978, Month.OCTOBER, 3)) var newName = "Test" var newYear = 1990 var newMonth: Month = Month.APRIL var newDay = 20 }
oneugene/evgeniy-portfolio
jmhtests/src/main/scala/org/oneugene/log/play/TestConstants.scala
Scala
mit
366
/* Copyright (c) 2017, Qvantel All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Qvantel nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Qvantel BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.qvantel.jsonapi import org.specs2.mutable.Specification import shapeless.{:+:, CNil, Coproduct, Poly1} import _root_.spray.json.DefaultJsonProtocol._ import _root_.spray.json._ final class PolyToOneSpec extends Specification { sequential implicit val apiRoot: com.qvantel.jsonapi.ApiRoot = ApiRoot(None) @jsonApiResource final case class Person(id: String, name: String) @jsonApiResource final case class Company(id: String, name: String) type Author = Person :+: Company :+: CNil private[this] implicit object AuthorPolyIdentifiable extends PolyIdentifiable[Author] { private[this] object polyIdentify extends Poly1 { implicit def casePerson = at[Person](obj => obj.id) implicit def caseCompany = at[Company](obj => obj.id) } private[this] object polyResourceType extends Poly1 { implicit def casePerson = at[Person](_ => implicitly[ResourceType[Person]].resourceType) implicit def caseCompany = at[Company](_ => implicitly[ResourceType[Company]].resourceType) } override def identify(a: Author): String = a fold polyIdentify override def resourceType(a: Author): String = a fold polyResourceType } @jsonApiResource final case class Article(id: String, title: String, author: PolyToOne[Author]) type Looped = Loop :+: Person :+: TestThing :+: CNil private[this] implicit object LoopedPolyIdentifiable extends PolyIdentifiable[Looped] { private[this] object polyIdentify extends Poly1 { implicit def caseLoop = at[Loop](obj => obj.id) implicit def casePerson = at[Person](obj => obj.id) implicit def caseTestThing = at[TestThing](obj => obj.id) } private[this] object polyResourceType extends Poly1 { implicit def caseLoop = at[Loop](_ => implicitly[ResourceType[Loop]].resourceType) implicit def casePerson = at[Person](_ => implicitly[ResourceType[Person]].resourceType) implicit def caseTestThing = at[TestThing](_ => implicitly[ResourceType[TestThing]].resourceType) } override def identify(a: Looped): String = a fold polyIdentify override def resourceType(a: Looped): String = a fold polyResourceType } @jsonApiResource final case class Loop(id: String, looped: PolyToOne[Looped]) @jsonApiResource 
final case class Maybe(id: String, maybe: Option[PolyToOne[Author]]) @jsonApiResource final case class TestThing(id: String, foo: Option[PolyToOne[Looped]]) "get" should { "return Some for Loaded" in { val person = Person("id", "name") val author = Coproduct[Author](person) PolyToOne.loaded[Author, Person](person).get must beSome(author) } "return None for Reference" in { PolyToOne.reference[Author, Person]("id").get must beNone } } "readers" should { "properly read in simple poly to-one reference" in { val article = Article("1", "boom", PolyToOne.reference[Author, Person]("john")) val articleJson = implicitly[JsonApiFormat[Article]].write(article) implicitly[JsonApiFormat[Article]].read(articleJson, Set.empty) must be equalTo article } "properly read in simple poly to-one loaded" in { val article = Article("1", "boom", PolyToOne.loaded[Author, Person](Person("test-id", "mario"))) val json = rawOne[Article](article) readOne[Article](json, Set("author")) must be equalTo article } "properly read in looped poly to-one reference" in { val loop = Loop("1", PolyToOne.reference[Looped, Person]("john")) val loopJson = implicitly[JsonApiFormat[Loop]].write(loop) implicitly[JsonApiFormat[Loop]].read(loopJson, Set.empty) must be equalTo loop } "properly read in looped poly to-one loaded" in { val loop = Loop("1", PolyToOne.loaded[Looped, Loop](Loop("2", PolyToOne.loaded[Looped, Person](Person("john", "doe"))))) val json = rawOne[Loop](loop) readOne[Loop](json, Set("looped.looped")) must be equalTo loop } "properly read in optional poly to one reference" in { val maybe = Maybe("1", None) val maybeJson = implicitly[JsonApiFormat[Maybe]].write(maybe) implicitly[JsonApiFormat[Maybe]].read(maybeJson, Set.empty) must be equalTo maybe } "properly read in optional poly to one loaded" in { val maybe = Maybe("1", Some(PolyToOne.loaded[Author, Person](Person("test-id", "mario")))) val maybeJson = rawOne[Maybe](maybe) readOne[Maybe](maybeJson, Set("maybe")) must be equalTo maybe } "handle null/non existing/empty relationships" in { val emptyRelationships = """ |{ | "id": "test", | "type": "maybes", | "relationships": { | | } |} """.stripMargin.parseJson val emptyIdRelationship = """ |{ | "id": "test", | "type": "maybes", | "relationships": { | "maybe": { | "data": { | "id": "", | "type": "people" | } | } | } |} """.stripMargin.parseJson val nullRelationships = """ |{ | "id": "test", | "type": "maybes", | "relationships": null |} """.stripMargin.parseJson val nonExistingRelationships = """ |{ | "id": "test", | "type": "maybes" |} """.stripMargin.parseJson implicitly[JsonApiFormat[Maybe]].read(emptyRelationships, Set.empty) must be equalTo Maybe("test", None) implicitly[JsonApiFormat[Maybe]].read(nullRelationships, Set.empty) must be equalTo Maybe("test", None) implicitly[JsonApiFormat[Maybe]].read(nonExistingRelationships, Set.empty) must be equalTo Maybe("test", None) implicitly[JsonApiFormat[Maybe]].read(emptyIdRelationship, Set.empty) must throwA[DeserializationException]( "illegal id 'empty string' found") } "fail with deserialization exception when the entity in the relationship is of wrong type" in { val article = Article("1", "boom", PolyToOne.loaded[Author, Person](Person("test-id", "mario"))) val articleJson = implicitly[JsonApiFormat[Article]].write(article) val articleIncludesJson = implicitly[JsonApiFormat[Article]].included(article) import _root_.spray.json.lenses.JsonLenses._ val modifiedArticleJson = articleJson.update("relationships" / "author" / "data" / "type" ! 
set[String]("wrong-type")).asJsObject val modifiedArticleIncludes = articleIncludesJson.map(_.update(Symbol("type") ! set[String]("wrong-type"))).map(_.asJsObject) implicitly[JsonApiFormat[Article]] .read(modifiedArticleJson, modifiedArticleIncludes) must throwA[DeserializationException]( message = "relationship of type 'wrong-type' is not part of coproduct 'PolyToOneSpec.this.Author'") val maybe = Maybe("1", Some(PolyToOne.loaded[Author, Person](Person("test-id", "mario")))) val maybeJson = implicitly[JsonApiFormat[Maybe]].write(maybe) val maybeIncludedJson = implicitly[JsonApiFormat[Maybe]].included(maybe) val modifiedMaybeJson = maybeJson.update("relationships" / "maybe" / "data" / "type" ! set[String]("wrong-type")).asJsObject val modifiedMaybeIncludes = maybeIncludedJson.map(_.update(Symbol("type") ! set[String]("wrong-type"))).map(_.asJsObject) implicitly[JsonApiFormat[Maybe]] .read(modifiedMaybeJson, modifiedMaybeIncludes) must throwA[DeserializationException]( message = "relationship of type 'wrong-type' is not part of coproduct 'PolyToOneSpec.this.Author'") } } "write" >> { "print out data as null for None case of Option[PolyToOne[X]]" >> { val t = Maybe("id", None) val rawJson = """ |{ | "data": { | "relationships": { | "maybe": { | "data": null, | "links": { | "related": "/maybes/id/maybe" | } | } | }, | "links": { | "self": "/maybes/id" | }, | "id": "id", | "type": "maybes" | } |} """.stripMargin.parseJson.asJsObject rawOne(t) must be equalTo rawJson } "skip printing out data for JsonAbsent case of JsonOption[PolyToOne[X]]" in { @jsonApiResource final case class Test(id: String, opt: JsonOption[PolyToOne[Author]]) val t = Test("id", JsonAbsent) val rawJson = """ |{ | "data": { | "relationships": { | "opt": { | "links": { | "related": "/tests/id/opt" | } | } | }, | "links": { | "self": "/tests/id" | }, | "id": "id", | "type": "tests" | } |} """.stripMargin.parseJson.asJsObject rawOne(t) must be equalTo rawJson } "print out data as null for JsonNull case of JsonOption[PolyToOne[X]]" in { @jsonApiResource final case class Test(id: String, opt: JsonOption[PolyToOne[Author]]) val t = Test("id", JsonNull) val rawJson = """ |{ | "data": { | "relationships": { | "opt": { | "links": { | "related": "/tests/id/opt" | }, | "data": null | } | }, | "links": { | "self": "/tests/id" | }, | "id": "id", | "type": "tests" | } |} """.stripMargin.parseJson.asJsObject rawOne(t) must be equalTo rawJson } "correctly write sparse fieldsets (while supporting inclusion of the relationship even if it is not included in the sparse fieldset)" >> { implicit val sparseFields: Map[String, List[String]] = Map("articles" -> List("title")) val article = Article("1", "boom", PolyToOne.loaded[Author, Person](Person("test-id", "mario"))) val rawJson = """ |{ | "data": { | "attributes": { | "title": "boom" | }, | "links": { | "self":"/articles/1" | }, | "id": "1", | "type": "articles" | }, | "included": [ | { | "attributes": { | "name": "mario" | }, | "id": "test-id", | "links": { | "self": "/people/test-id" | }, | "type": "people" | } | ] |} """.stripMargin.parseJson.asJsObject rawOne[Article](article) must be equalTo rawJson } } "properly generate Includes type class for poly to one relationship" in { val includes = implicitly[Includes[Loop]] includes.includeAllowed("looped") must beTrue includes.includeAllowed("looped.foo.looped") must beTrue includes.includeAllowed("looped.looped.looped") must beTrue includes.includesAllowed("looped", "looped.foo.looped", "looped.looped.looped") must beTrue 
includes.includeAllowed("foo") must beFalse includes.includeAllowed("notlooped") must beFalse includes.includeAllowed("looped.brake.looped") must beFalse includes.includeAllowed("looped.foo.brake") must beFalse includes.includesAllowed("foo", "notlooped", "looped.brake.looped", "looped.foo.brake") must beFalse includes.includesAllowed("looped", "notlooped") must beFalse } "read and write JsonOption relationship" in { @jsonApiResource("test", "no-id") final case class Test(name: String, x: JsonOption[PolyToOne[Author]]) val t = Test("name", JsonAbsent) val json = rawOne(t) val parsed = readOne[Test](json) parsed must be equalTo t val t2 = Test("name", JsonNull) val json2 = rawOne(t2) val parsed2 = readOne[Test](json2) parsed2 must be equalTo (t2) val t3 = Test("name", JsonSome(PolyToOne.reference[Author, Person]("test"))) val json3 = rawOne(t3) val parsed3 = readOne[Test](json3) parsed3 must be equalTo t3 val t4 = Test("name", JsonSome(PolyToOne.loaded[Author, Person](Person("2", "2")))) val json4 = rawOne(t4) val parsed4 = readOne[Test](json4, Set("x")) parsed4 must be equalTo t4 } }
qvantel/jsonapi-scala
core/src/test/scala/com/qvantel/jsonapi/PolyToOneSpec.scala
Scala
bsd-3-clause
14,232
package Tutorial import Chisel._ import Node._ import Literal._ import scala.collection.mutable.HashMap import scala.collection.mutable.ArrayBuffer class point_t extends Bundle { val x = UFix(width = 64) val y = UFix(width = 64) val z = UFix(width = 64) } class KEngineIO_t extends Bundle { val centeroidsFinished = Bool() val pointsFinished = Bool() val centeroidIndex = UFix(width = 16) val point = new point_t } class FU_in_t extends Bundle { val in1 = UFix(width = 64) val in2 = UFix(width = 64) } class FU_out_t extends Bundle { val out = UFix(width = 64) } class distanceFU_in_t extends Bundle { val in1 = new point_t val in2 = new point_t } class distanceFU_out_t extends Bundle { val out = UFix(width = 64) } class pointMemInput_t extends Bundle { val rw = Bool() val addr = UFix(width = 16) val data = new point_t } class pointMemOutput_t extends Bundle { val data = new point_t }
seyedmaysamlavasani/GorillaPP
chisel/Gorilla++/emulator/src/k-means.scala
Scala
bsd-3-clause
901
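A hypothetical sketch (not from this repo; the Chisel 2 Module API is assumed) of wiring the bundles above through a module:

import Chisel._

class PointPass extends Module {
  val io = new Bundle {
    val in       = (new point_t).asInput
    val out      = (new point_t).asOutput
    val isOrigin = Bool(OUTPUT)
  }
  io.out := io.in  // combinational pass-through of all three coordinates
  io.isOrigin := io.in.x === UFix(0) && io.in.y === UFix(0) && io.in.z === UFix(0)
}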
/*********************************************************************** * Copyright (c) 2013-2018 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.index.conf import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty object SchemaProperties { val ValidateDistributedClasspath = SystemProperty("geomesa.validate.distributed.classpath", "true") }
jahhulbert-ccri/geomesa
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/conf/SchemaProperties.scala
Scala
apache-2.0
723
package org.apache.predictionio.examples.pfriendrecommendation import org.apache.spark.SparkContext._ import org.apache.spark.graphx._ import org.apache.spark.rdd.RDD import scala.collection.mutable.{ListBuffer, ArraySeq, HashSet} import scala.util.Random import scala.collection.mutable.HashSet import scala.collection.mutable.Queue import org.apache.spark.SparkContext import scala.collection.mutable.Map object Sampling { def geometricSample(param: Double) : Int = { var num = 1 while(Random.nextDouble <= param) { num += 1 } num } def sortBySrc(a:Array[Edge[Int]]): Array[Edge[Int]] = { if (a.length < 2) { a } else { val pivot = a(a.length / 2).srcId // 'L'ess, 'E'qual, 'G'reater val partitions = a.groupBy( (e:Edge[Int]) => { if (e.srcId < pivot) 'L' else if (e.srcId > pivot) 'G' else 'E' }) var sortedAccumulator: Array[Edge[Int]] = Array() List('L', 'E', 'G').foreach((c:Char) => { if (partitions.contains(c)) { sortedAccumulator = sortedAccumulator ++ partitions(c) } }) sortedAccumulator } } // Samples vertices by forest fire random process and induces edges. // Fraction denotes fraction of total graph vertices to sample and geoParam // denotes the parameter for geometric distribution, which is used to // determine branching factor at each iteration of forest fire process. def forestFireSamplingInduced ( sc: SparkContext, graph: Graph[Int,Int], fraction: Double, geoParam: Double = 0.7) = { var g = graph var e = sortBySrc(g.edges.toArray) val targetVertexCount = (graph.vertices.count() * fraction).toInt var seedVertices = graph.vertices .sample(false, fraction, targetVertexCount) .toArray.iterator var sampledVertices: HashSet[VertexId] = HashSet() var burnQueue: Queue[VertexId] = Queue() while (sampledVertices.size < targetVertexCount) { val seedVertex = seedVertices.next sampledVertices += seedVertex._1 burnQueue += seedVertex._1 while (burnQueue.size > 0 ){ val vertexId = burnQueue.dequeue() val numToSample = geometricSample(geoParam) val edgeCandidates = accumulateEdges(e, vertexId) val burnCandidate = sc.parallelize(edgeCandidates) .filter((e:Edge[Int]) => { !sampledVertices.contains(e.dstId) }) val burnFraction = numToSample.toDouble / burnCandidate.count.toDouble val burnEdges = burnCandidate.sample( false, burnFraction, Random.nextLong) val neighborVertexIds = burnEdges.map((e:Edge[Int]) => e.dstId) sampledVertices = sampledVertices ++ neighborVertexIds.toArray burnQueue = burnQueue ++ neighborVertexIds.toArray if (sampledVertices.size > targetVertexCount) { burnQueue.dequeueAll((v:VertexId) => true) } } } val vertex: Seq[(VertexId, Int)] = sampledVertices.map(v => (v,1)) .toSeq val edges = graph.edges.filter(e => sampledVertices.contains(e.srcId) && sampledVertices.contains(e.dstId) ) Graph(sc.parallelize(vertex), edges) } // Samples vertices uniformly and induces edges. 
def nodeSampling(sc:SparkContext, graph:Graph[Int,Int], fraction:Double) = { val vertices = graph.vertices.sample(false, fraction, Random.nextLong) val vertexMap = vertices.collectAsMap() val edges = graph.edges.filter(e => vertexMap.contains(e.srcId) && vertexMap.contains(e.dstId) ) val graph2 = Graph(vertices, edges) graph2 } // Get all edges with source vertexId of target def accumulateEdges( e:Array[Edge[Int]], target:VertexId) : ListBuffer[Edge[Int]] = { val idx = binarySearchE(e, target)(0, e.size-1) var outEdges: ListBuffer[Edge[Int]] = ListBuffer() if (idx == -1) return outEdges outEdges.append(e(idx)) var tIdx = idx+1 var edge:Edge[Int] = null // get upper edges while (tIdx < e.size) { edge = e(tIdx) if (edge.srcId == target) { outEdges.append(edge) tIdx += 1 } else { tIdx = e.size } } // get lower edges tIdx = idx-1 while (tIdx > -1){ edge = e(tIdx) if (edge.srcId == target) { outEdges.append(edge) tIdx -= 1 } else { tIdx = -1 } } outEdges } // Binary search to find an edge with target vertexId def binarySearchE(list: Array[Edge[Int]], target: VertexId) (start: Int=0, end: Int=list.length-1): Int = { if (start>end) return -1 val mid = start + (end-start+1)/2 if (list(mid).srcId == target) return mid else if (list(mid).srcId > target) return binarySearchE(list, target)(start, mid-1) else return binarySearchE(list, target)(mid+1, end) } }
alex9311/PredictionIO
examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala
Scala
apache-2.0
4,949
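A hedged sketch of the two lookup helpers above on a tiny, src-sorted edge array (vertex ids invented): binarySearchE lands on some edge whose srcId matches, and accumulateEdges then expands in both directions to collect the whole out-edge run.

import org.apache.spark.graphx.Edge
import org.apache.predictionio.examples.pfriendrecommendation.Sampling

val edges = Array(
  Edge(1L, 2L, 1), Edge(1L, 3L, 1),
  Edge(2L, 3L, 1),
  Edge(4L, 1L, 1), Edge(4L, 2L, 1))  // already sorted by srcId, as sortBySrc guarantees

val idx = Sampling.binarySearchE(edges, 4L)(0, edges.length - 1)  // index of one edge with srcId 4
val out = Sampling.accumulateEdges(edges, 4L)                     // both out-edges of vertex 4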
/* ************************************************************************************* * Copyright 2016 Normation SAS ************************************************************************************* * * This file is part of Rudder. * * Rudder is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU General Public License version 3, the copyright holders add * the following Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General * Public License version 3, when you create a Related Module, this * Related Module is not considered as a part of the work and may be * distributed under the license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * Rudder is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Rudder. If not, see <http://www.gnu.org/licenses/>. * ************************************************************************************* */ package com.normation.templates.cli import java.io.File import java.io.InputStreamReader import java.io.BufferedReader import com.normation.templates.FillTemplatesService import com.normation.templates.STVariable import com.normation.utils.Control._ import org.apache.commons.io.FileUtils import net.liftweb.common._ import net.liftweb.json._ import scopt.OptionParser import org.apache.commons.io.IOUtils import java.io.StringWriter /** * The configuration object for our CLI. * The basic process is to take one file as input for the definition of variables, and a set of files as templates to fill. * * By default, the files are generated with the same name in the current folder. * * - add the possibility to take directories as input (but a good shell can do it, so not very important) */ final case class Config( variables : File = new File("variables.json") , templates : Seq[File] = Seq() , outdir : File = new File(".") , verbose : Boolean = false , inputExtension : String = ".st" , outputExtension: String = "" , showStackTrace : Boolean = false , outputToStdout : Boolean = false ) object Tryor { //the lazy param is of course necessary, else the exception is thrown //before going to the block, never caught. def apply[T](cmd: => T, errorMsg: String): Box[T] = { try { Full(cmd) } catch { case ex: Exception => Failure(s"${errorMsg}: ${ex.getMessage}", Full(ex), Empty) } } } object TemplateCli { val fillerService = new FillTemplatesService() val parser = new OptionParser[Config]("Rudder template cli") { head("rudder-templates-cli", "4.0.x") opt[File]("outdir") valueName("<file>") action { (x, c) => c.copy(outdir = x) } text("output directory for filled templates, default is '.'") opt[String]("inext") optional() valueName("<input file extension>") action { (x, c) => c.copy(inputExtension = x) } text("extension of input templates. 
Default is '.st'") opt[String]("outext") optional() valueName("<output file extension>") action { (x, c) => c.copy(outputExtension = x) } text("extension of templates after processing. Default is '' (no extension added)") opt[File]('p', "params") optional() valueName("<variable.json>") action { (x, c) => c.copy(variables = x) } text("JSON file defining variables. Default is 'variables.json'. See below for format details.") opt[Unit]('X', "stackTrace") optional() action { (_, c) => c.copy(showStackTrace = true) } text("Print stack trace on error") opt[Unit]("stdout") optional() action { (_, c) => c.copy(outputToStdout = true) } text("Print stack trace on error") arg[File]("<template.st>...") optional() unbounded() action { (x, c) => c.copy(templates = c.templates:+ x) } text("""list of templates to fill. Only file with the correct extension (by default '.st') will | be processed. The extension will be replaced by '.cf' by default, ounce processed.""".stripMargin) help("help") text("prints this usage text") note("""The expected format for variables.json is a simple key:value file, with value being only string, boolean or Array of string. 'system' and 'optioannal' properties can also be specified: | { | "key1": true | , "key2": "some value" | , "key3": "42" | , "key4": [ "some", "more", "values", true, false ] | , "key5": { "value": "k5", "system": true, "optional": false } | , "key6": { "value": [ "a1", "a2", "a3" ], "system": false, "optional": true } | , "key7": "" | , "key8": { "value": [] } | } """.stripMargin) } def main(args: Array[String]): Unit = { //in case of error with args, stop and display usage val config = parser.parse(args, Config()).getOrElse { parser.showUsage System.exit(1) //just for type inference, never reached Config() } process(config) match { case eb: EmptyBox => val e = eb match { case Empty => eb ?~! "Error when processing templates" case f:Failure => f } System.err.println(e.messageChain) if(config.showStackTrace) { e.rootExceptionCause.foreach { ex => System.err.println (ex.getMessage) ex.printStackTrace() } } System.exit(1) case Full(res) => //ok //here, we can't call System.exit(0), because maven. //seriously: http://maven.apache.org/surefire/maven-surefire-plugin/faq.html#vm-termination // """Surefire does not support tests or any referenced libraries calling System.exit() at any time.""" } } /** * An utility method so that I can actually test things, * because you know, maven doesn't allow to have exit(1) * anywhere, so I'm going to be able to test on Full/Failure */ def process(config: Config) = { for { variables <- ParseVariables.fromFile(config.variables) allDone <- if(config.templates.nonEmpty) { val filler = //if we are writing to stdout, use a different filler and ignore outputExtension if(config.outputToStdout) { fillToStdout(variables.toSeq, config.inputExtension) _ } else { fill(variables.toSeq, config.outdir, config.inputExtension, config.outputExtension) _ } bestEffort(config.templates) { filler } } else { /* * If no templates are given, try to read from stdin. * In that case, --stdout is forced. 
*/ for { content <- readStdin() ok <- filledAndWriteToStdout(variables.toSeq, content, "stdin") } yield { ok } } } yield { allDone } } def readStdin(): Box[String] = { for { in <- Tryor(new java.io.InputStreamReader(System.in), "Error when trying to access stdin") ready <- if(in.ready) Full("ok") else Failure("Can not get template content from stdin and no template file given") content <- Tryor(IOUtils.toString(System.in, "UTF-8"), "Error when trying to read content from stdin") ok <- if(content.length > 0) { Full(content) } else { Failure("Can not get template content from stdin and no template file given") } } yield { ok } } /** * Utility method that handles reading from file / writing to file. * It takes variables and outDir as a separate argument list so that * it is easier to reuse the same "filler" context for different templates * * Only files with inputExtension are processed. * inputExtension is replaced by outputExtension. */ def fill(variables: Seq[STVariable], outDir: File, inputExtension: String, outputExtension: String)(template: File): Box[String] = { for { ok <- if(template.getName.endsWith(inputExtension)) { Full("ok") } else { Failure(s"Ignoring file ${template.getName} because it does not have extension '${inputExtension}'") } content <- Tryor(FileUtils.readFileToString(template), s"Error when reading variables from ${template.getAbsolutePath}") filled <- fillerService.fill(template.getAbsolutePath, content, variables) name = template.getName out = new File(outDir, name.substring(0, name.size-inputExtension.size)+outputExtension) writed <- Tryor(FileUtils.writeStringToFile(out, filled), s"Error when writing filled template into ${out.getAbsolutePath}") } yield { out.getAbsolutePath } } /** * Same as fill, but print everything to stdout */ def fillToStdout(variables: Seq[STVariable], inputExtension: String)(template: File): Box[String] = { for { ok <- if(template.getName.endsWith(inputExtension)) { Full("ok") } else { Failure(s"Ignoring file ${template.getName} because it does not have extension '${inputExtension}'") } content <- Tryor(FileUtils.readFileToString(template), s"Error when reading variables from ${template.getAbsolutePath}") writed <- filledAndWriteToStdout(variables, content, template.getName) } yield { writed } } def filledAndWriteToStdout(variables: Seq[STVariable], content: String, templateName: String) = { for { filled <- fillerService.fill(templateName, content, variables) writed <- Tryor(IOUtils.write(filled, System.out, "UTF-8"), s"Error when writing filled template to stdout") } yield { templateName } } } /** * Parse the JSON file for variables. * We only understand two types of value: string and boolean. 
* The expected format is: * { * "key1": true * , "key2": "some value" * , "key3": "42" * , "key4": [ "some", "more", "values", true, false ] * , "key5": { "value": "k5", "system": true, "optional": false } * , "key6": { "value": [ "a1", "a2", "a3" ], "system": false, "optional": true } * , "key7": "" * , "key8": { "value": [] } * } * * * Default value for system is false * Default value for optional is true * */ object ParseVariables extends Loggable { def fromFile(file: File): Box[Set[STVariable]] = { for { jsonString <- Tryor(FileUtils.readFileToString(file, "UTF-8"), s"Error when trying to read file ${file.getAbsoluteFile}") vars <- fromString(jsonString) } yield { vars } } def fromString(jsonString: String): Box[Set[STVariable]] = { def parseAsValue(v: JValue): List[Any] = { v match { case JString(value) => value :: Nil case JBool(value) => value :: Nil case JArray(arr) => arr.map { x => x match { case JString(value) => value case JBool(value) => value //at that level, any other thing, including array, is parsed as a simple string case value => compactRender(value) } } case value => compactRender(value) :: Nil } } //the whole logic for { json <- Tryor(JsonParser.parse(jsonString), s"Error when parsing the variable file") } yield { json match { case JObject(fields) => fields.flatMap { x => x match { case field@JField(name, JObject(values)) => // in that case, only value is mandatory val map = values.map { case JField(n, v) => (n, v) }.toMap map.get("value") match { case None => logger.info(s"Missing mandatory field 'value' in object ${compactRender(JObject(field))}") None case Some(value) => val optional = map.get("optional") match { case Some(JBool(b)) => b case _ => true } val system = map.get("system") match { case Some(JBool(b)) => b case _ => false } Some(STVariable(name, optional, parseAsValue(value), system)) } //in any other case, parse as value case JField(name, value) => Some(STVariable(name, true, parseAsValue(value), false)) //and if not a field, well just abort case _ => None } }.toSet case _ => Set() } } } }
armeniaca/rudder
rudder-templates-cli/src/main/scala/com/normation/templates/cli/TemplateCli.scala
Scala
gpl-3.0
13,592
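A hedged sketch of the parser above on the documented format (keys invented); plain values default to optional = true and system = false, while the object form overrides both.

import com.normation.templates.cli.ParseVariables

val vars = ParseVariables.fromString(
  """{ "component": "ntp"
    |, "trackingKeys": [ "k1", "k2" ]
    |, "debug": { "value": true, "system": true, "optional": false }
    |}""".stripMargin)
// Full(Set( STVariable("component", true, List("ntp"), false)
//         , STVariable("trackingKeys", true, List("k1", "k2"), false)
//         , STVariable("debug", false, List(true), true) ))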
package com.thetestpeople.trt.utils import org.apache.commons.validator.routines._ import org.joda.time.Duration import play.api.data._ import play.api.data.format.Formatter import play.api.data.validation._ import java.net.URI import com.thetestpeople.trt.model.Configuration object FormUtils { private lazy val durationParser = DurationParser() implicit val durationFormat = new Formatter[Duration] { def bind(key: String, data: Map[String, String]): Either[Seq[FormError], Duration] = data.get(key).flatMap(durationParser.parse).toRight(formError(key)) private def formError(key: String) = Seq(FormError(key, "Not a valid duration", Nil)) def unbind(key: String, value: Duration) = Map(key -> durationParser.asString(value)) } def duration = Forms.of[Duration] implicit val urlFormat = new Formatter[URI] { def bind(key: String, data: Map[String, String]): Either[Seq[FormError], URI] = data.get(key).flatMap(parseURL).toRight(formError(key)) private def formError(key: String) = Seq(FormError(key, "Not a valid URL", Nil)) def unbind(key: String, value: URI) = Map(key -> value.toString) } def url = Forms.of[URI] private def parseURL(s: String): Option[URI] = if (urlValidator.isValid(s)) Some(new URI(s)) else None implicit val configurationFormat = new Formatter[Configuration] { def bind(key: String, data: Map[String, String]): Either[Seq[FormError], Configuration] = data.get(key).map(Configuration.apply).toRight(formError(key)) private def formError(key: String) = Seq(FormError(key, "Not a valid Configuration", Nil)) def unbind(key: String, value: Configuration) = Map(key -> value.configuration) } def configuration = Forms.of[Configuration] private lazy val urlValidator: UrlValidator = new UrlValidator(Array("http", "https"), new RegexValidator("^([\\p{Alnum}\\-\\.]*)(:\\d*)?(.*)?"), 0) lazy val isUrl: Constraint[String] = Constraint(plainText ⇒ if (urlValidator.isValid(plainText)) Valid else Invalid(Seq(ValidationError("Not a well-formed URL")))) }
thetestpeople/trt
app/com/thetestpeople/trt/utils/FormUtils.scala
Scala
mit
2,119
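A hedged sketch of the Formatter contract above; DurationParser is private here, so whether "30 seconds" actually parses is an assumption and the string is illustrative only.

import com.thetestpeople.trt.utils.FormUtils
import org.joda.time.Duration

val bound: Either[Seq[play.api.data.FormError], Duration] =
  FormUtils.durationFormat.bind("timeout", Map("timeout" -> "30 seconds"))
// Right(duration) on success, Left(Seq(FormError("timeout", "Not a valid duration"))) otherwise

val unbound: Map[String, String] =
  FormUtils.durationFormat.unbind("timeout", Duration.standardSeconds(30))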
package slick.jdbc import java.net.URI import java.sql.PreparedStatement import com.typesafe.config.ConfigException import scala.concurrent.Await import scala.concurrent.duration.Duration import scala.language.experimental.macros import scala.language.implicitConversions import scala.reflect.ClassTag import scala.reflect.macros.{blackbox, whitebox} import scala.collection.mutable.ArrayBuffer import slick.SlickException import slick.basic.{DatabaseConfig, StaticDatabaseConfigMacros, StaticDatabaseConfig} import slick.dbio.{NoStream, Effect} import slick.sql.{SqlAction, SqlStreamingAction} import slick.util.ClassLoaderUtil class ActionBasedSQLInterpolation(val s: StringContext) extends AnyVal { import ActionBasedSQLInterpolation._ /** Build a SQLActionBuilder via string interpolation */ def sql(param: Any*): SQLActionBuilder = macro sqlImpl /** Build an Action for an UPDATE statement via string interpolation */ def sqlu(param: Any*): SqlAction[Int, NoStream, Effect] = macro sqluImpl /** Build an Invoker for a statement with computed types via string interpolation */ def tsql(param: Any*): SqlStreamingAction[Vector[Any], Any, Effect] = macro tsqlImpl } object ActionBasedSQLInterpolation { def sqlImpl(ctxt: blackbox.Context)(param: ctxt.Expr[Any]*): ctxt.Expr[SQLActionBuilder] = { import ctxt.universe._ val macroTreeBuilder = new MacroTreeBuilder[ctxt.type](ctxt)(param.toList) reify { SQLActionBuilder( ctxt.Expr[Seq[Any]] (macroTreeBuilder.queryParts).splice, ctxt.Expr[SetParameter[Unit]](macroTreeBuilder.pconvTree).splice ) } } def sqluImpl(ctxt: blackbox.Context)(param: ctxt.Expr[Any]*): ctxt.Expr[SqlAction[Int, NoStream, Effect]] = { import ctxt.universe._ val macroTreeBuilder = new MacroTreeBuilder[ctxt.type](ctxt)(param.toList) reify { val res: SQLActionBuilder = SQLActionBuilder( ctxt.Expr[Seq[Any]] (macroTreeBuilder.queryParts).splice, ctxt.Expr[SetParameter[Unit]](macroTreeBuilder.pconvTree).splice ) res.asUpdate } } def tsqlImpl(ctxt: whitebox.Context)(param: ctxt.Expr[Any]*): ctxt.Expr[SqlStreamingAction[Vector[Any], Any, Effect]] = { import ctxt.universe._ val macroTreeBuilder = new MacroTreeBuilder[ctxt.type](ctxt)(param.toList) val uri = StaticDatabaseConfigMacros.getURI(ctxt) //TODO The database configuration and connection should be cached for subsequent macro invocations val dc = try DatabaseConfig.forURI[JdbcProfile](new URI(uri), ClassLoaderUtil.defaultClassLoader) catch { case ex @ (_: ConfigException | _: SlickException) => ctxt.abort(ctxt.enclosingPosition, s"""Cannot load @StaticDatabaseConfig("$uri"): ${ex.getMessage}""") } val rTypes = try { val a = SimpleJdbcAction { ctx => ctx.session.withPreparedStatement(macroTreeBuilder.staticQueryString) { _.getMetaData match { case null => Vector() case resultMeta => Vector.tabulate(resultMeta.getColumnCount) { i => val modelBuilder = dc.profile.createModelBuilder(Nil, true)(scala.concurrent.ExecutionContext.global) modelBuilder.jdbcTypeToScala(resultMeta.getColumnType(i + 1), resultMeta.getColumnTypeName(i + 1)) } } } } Await.result(dc.db.run(a), Duration.Inf) } finally dc.db.close() reify { val rconv = ctxt.Expr[GetResult[Any]](macroTreeBuilder.rconvTree(rTypes)).splice val res: SQLActionBuilder = SQLActionBuilder( ctxt.Expr[Seq[Any]] (macroTreeBuilder.queryParts).splice, ctxt.Expr[SetParameter[Unit]](macroTreeBuilder.pconvTree).splice ) res.as(rconv) } } } case class SQLActionBuilder(queryParts: Seq[Any], unitPConv: SetParameter[Unit]) { def as[R](implicit rconv: GetResult[R]): SqlStreamingAction[Vector[R], R, Effect] = { val 
query = if(queryParts.length == 1 && queryParts(0).isInstanceOf[String]) queryParts(0).asInstanceOf[String] else queryParts.iterator.map(String.valueOf).mkString new StreamingInvokerAction[Vector[R], R, Effect] { def statements = List(query) protected[this] def createInvoker(statements: Iterable[String]) = new StatementInvoker[R] { val getStatement = statements.head protected def setParam(st: PreparedStatement) = unitPConv((), new PositionedParameters(st)) protected def extractValue(rs: PositionedResult): R = rconv(rs) } protected[this] def createBuilder = Vector.newBuilder[R] } } def asUpdate = as[Int](GetResult.GetUpdateValue).head }
AtkinsChang/slick
slick/src/main/scala/slick/jdbc/StaticQuery.scala
Scala
bsd-2-clause
4,660
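A hedged usage sketch for the interpolators above (table and column names invented); importing any profile's api brings them into scope.

import slick.jdbc.H2Profile.api._

val minAge = 18

// sql builds a SQLActionBuilder; as[T] supplies the GetResult the invoker uses per row
val names: DBIO[Vector[String]] =
  sql"select name from users where age >= $minAge".as[String]

// sqlu goes through asUpdate and yields the affected row count
val bump: DBIO[Int] =
  sqlu"update users set age = age + 1 where age >= $minAge"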
import sbt._ import Keys._ final class ReactiveMongo(scalacPlugin: Project) { self => import Dependencies._ import Format._ val reactiveResolvers = Seq( Resolver.typesafeRepo("snapshots"), Resolver.sonatypeRepo("snapshots")) lazy val generatedClassDirectory = settingKey[File]( "Directory where classes get generated") val reactiveMongoVer = "1.0.10" lazy val project = Project(id = "reactive-mongo", base = file("reactive-mongo")). settings(formatSettings ++ Set( name := "reactive-mongo", fork in Test := true, resolvers ++= reactiveResolvers, compile in Test := (compile in Test).dependsOn( scalacPlugin / packageBin in Compile).value, scalacOptions in Test ++= ScalacPlugin. compilerOptions(scalacPlugin).value, libraryDependencies ++= Seq( "org.reactivemongo" %% "reactivemongo" % reactiveMongoVer % Provided, "com.jsuereth" %% "scala-arm" % "2.1-SNAPSHOT", "org.slf4j" % "slf4j-simple" % "1.7.36" % Provided, "com.chuusai" %% "shapeless" % "2.3.8", "org.specs2" %% "specs2-core" % specsVer.value % Test) )) lazy val playProject = Project(id = "play-reactive-mongo", base = file("play-reactive-mongo")). settings(formatSettings ++ Set( name := "play-reactive-mongo", compile in Test := (compile in Test).dependsOn( scalacPlugin / packageBin in Compile).value, scalacOptions in Test ++= ScalacPlugin. compilerOptions(scalacPlugin).value, resolvers ++= reactiveResolvers, libraryDependencies ++= { val sv = scalaBinaryVersion.value val (playVer, playVar) = { if (sv == "2.12") "2.6.3" -> "play26" else if (sv == "2.13") "2.7.3" -> "play27" else "2.5.19" -> "play25" } val playRmVer = reactiveMongoVer.span(_ != '-') match { case (v, mod) => (if (mod != "") mod.drop(1) else mod).span(_ != '-') match { case ("", _) => s"${v}-${playVar}" case (a, "") if (a startsWith "rc.") => s"${v}-${playVar}-${a}" case (a, b) => s"${v}-${a}-${playVar}${b}" } } val iteratees = { if (sv != "2.13") { Seq( "com.typesafe.play" %% "play-iteratees" % "2.6.1" % Provided) } else { Seq.empty } } (Seq("reactivemongo-play-json-compat", "play2-reactivemongo").map { "org.reactivemongo" %% _ % playRmVer % Provided }) ++ Seq( "com.typesafe.play" %% "play" % playVer % Provided, "org.specs2" %% "specs2-core" % specsVer.value % Test ) ++ iteratees } )).dependsOn(self.project) }
cchantep/acolyte
project/ReactiveMongo.scala
Scala
lgpl-2.1
2,909
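Tracing the playRmVer string surgery above as plain Scala, with the real reactiveMongoVer and an invented release-candidate variant:

def playRmVer(reactiveMongoVer: String, playVar: String): String =
  reactiveMongoVer.span(_ != '-') match {
    case (v, mod) =>
      (if (mod != "") mod.drop(1) else mod).span(_ != '-') match {
        case ("", _)                         => s"$v-$playVar"
        case (a, "") if (a startsWith "rc.") => s"$v-$playVar-$a"
        case (a, b)                          => s"$v-$a-$playVar$b"
      }
  }

playRmVer("1.0.10", "play26")       // "1.0.10-play26"
playRmVer("1.0.10-rc.1", "play26")  // "1.0.10-play26-rc.1" (hypothetical version)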
def flatMap[A, B](ra: Reader[E, A])(k: A => Reader[E, B]): Reader[E, B] = Reader { e => val a = runReader(ra)(e) val rb = k(a) runReader(rb)(e) }
hmemcpy/milewski-ctfp-pdf
src/content/3.5/code/scala/snippet11.scala
Scala
gpl-3.0
161
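The snippet above leans on a Reader type and runReader defined elsewhere in the book; a minimal self-contained rendering, plus a composition showing both readers reading the same environment:

case class Reader[E, A](run: E => A)
def runReader[E, A](r: Reader[E, A]): E => A = r.run

def flatMap[E, A, B](ra: Reader[E, A])(k: A => Reader[E, B]): Reader[E, B] =
  Reader { e => runReader(k(runReader(ra)(e)))(e) }

val doubled = Reader[Int, Int](e => e * 2)
val plusEnv = flatMap(doubled)(a => Reader[Int, Int](e => a + e))
assert(runReader(plusEnv)(10) == 30)  // (10 * 2) + 10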
package org.jetbrains.plugins.scala package project import com.intellij.openapi.roots.libraries.LibraryType import com.intellij.openapi.roots.libraries.ui.{LibraryPropertiesEditor, LibraryEditorComponent} import com.intellij.openapi.vfs.VirtualFile import com.intellij.openapi.project.Project import org.jetbrains.plugins.scala.project.template.ScalaLibraryDescription import org.jetbrains.plugins.scala.icons.Icons import javax.swing.JComponent /** * @author Pavel Fatin */ class ScalaLibraryType extends LibraryType[ScalaLibraryProperties](ScalaLibraryKind) { def getIcon = Icons.SCALA_SDK def getCreateActionName = "Scala SDK" def createNewLibrary(parentComponent: JComponent, contextDirectory: VirtualFile, project: Project) = ScalaLibraryDescription.createNewLibrary(parentComponent, contextDirectory) def createPropertiesEditor(editorComponent: LibraryEditorComponent[ScalaLibraryProperties]): LibraryPropertiesEditor = new ScalaLibraryPropertiesEditor(editorComponent) } object ScalaLibraryType { def instance: ScalaLibraryType = Option(LibraryType.findByKind(ScalaLibraryKind).asInstanceOf[ScalaLibraryType]) .getOrElse(throw new NoSuchElementException("Scala library type not found")) }
triggerNZ/intellij-scala
src/org/jetbrains/plugins/scala/project/ScalaLibraryType.scala
Scala
apache-2.0
1,237
package com.nice.zoocache /** * Copyright (C) 2012 NICE Systems ltd. * <p/> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author Arnon Rotem-Gal-Oz * @version %I%, %G% * <p/> */ import org.msgpack.annotation.Message @Message class Test { var name: String = "" }
NiceSystems/zcache
src/test/scala/com/nice/zoocache/Test.scala
Scala
apache-2.0
798
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.platanios.tensorflow.api.ops.lookup import org.platanios.tensorflow.api.core.Graph import org.platanios.tensorflow.api.core.types.{Resource, TF} import org.platanios.tensorflow.api.ops.{Op, Output, UntypedOp} /** Lookup table initializer that uses the provided tensors (containing keys and corresponding values) for initializing * a lookup table. * * @param keys Tensor containing the table keys. * @param values Tensor containing the table values. * * @author Emmanouil Antonios Platanios */ class LookupTableTensorInitializer[K: TF, V: TF] protected ( val keys: Output[K], val values: Output[V] ) extends LookupTableInitializer(keys.dataType, values.dataType) { /** Creates and returns an op that initializes the provided table. * * @param table Table to initialize. * @return Created initialization op for `table`. */ override def initialize( table: InitializableLookupTable[K, V], name: String = "Initialize" )(implicit evVTF: TF[V]): UntypedOp = { Op.nameScope(name) { val initializationOp = Op.Builder[(Output[Resource], Output[K], Output[V]), Unit]( opType = "InitializeTableV2", name = name, input = (table.handle, keys, values) ).build() Op.currentGraph.addToCollection(Graph.Keys.TABLE_INITIALIZERS)(initializationOp.asUntyped) initializationOp.asUntyped } } } object LookupTableTensorInitializer { def apply[K: TF, V: TF]( keys: Output[K], values: Output[V] ): LookupTableTensorInitializer[K, V] = { new LookupTableTensorInitializer(keys, values) } }
eaplatanios/tensorflow_scala
modules/api/src/main/scala/org/platanios/tensorflow/api/ops/lookup/LookupTableTensorInitializer.scala
Scala
apache-2.0
2,254
package edu.utexas.cs.sdao.reyes.core import scala.math._ import edu.utexas.cs.sdao.reyes.anim.Animatable /** * Defines a 4x4 matrix. * @param data a 16-item array containing the matrix in row-major * order, i.e. indices 0-3 are the first row, 4-7 the second row, etc. */ class Matrix4(val data: Array[Float] = Array.ofDim[Float](16)) extends Animatable[Matrix4] { if (data.length != 16) throw new IllegalArgumentException("Dim of matrix array != 16") private def idx(row: Int, col: Int) = row * 4 + col private def mult(arr1: Array[Float], arr2: Array[Float]) = { val newData = Array.ofDim[Float](16) for (i <- 0 until 4) { // i = row for (j <- 0 until 4) { // j = col newData(idx(i, j)) = arr1(idx(i, 0)) * arr2(idx(0, j)) + arr1(idx(i, 1)) * arr2(idx(1, j)) + arr1(idx(i, 2)) * arr2(idx(2, j)) + arr1(idx(i, 3)) * arr2(idx(3, j)) } } newData } def *(right: Matrix4) = new Matrix4(mult(data, right.data)) def *(u: Vector3): Vector3 = { Vector3(u.x * this(0, 0) + u.y * this(0, 1) + u.z * this(0, 2) + this(0, 3), u.x * this(1, 0) + u.y * this(1, 1) + u.z * this(1, 2) + this(1, 3), u.x * this(2, 0) + u.y * this(2, 1) + u.z * this(2, 2) + this(2, 3)) } def *(b: FilledBoundingSphere): FilledBoundingSphere = { // Origin changes when rotating. val newOrigin = this * b.origin // Radius changes when scaling. val newI = ((this * (b.origin + Vector3.I * b.radius)) - newOrigin).length val newJ = ((this * (b.origin + Vector3.J * b.radius)) - newOrigin).length val newK = ((this * (b.origin + Vector3.K * b.radius)) - newOrigin).length val newRadius = max(max(newI, newJ), newK) FilledBoundingSphere(newOrigin, newRadius) } /** * Generates the inverse matrix, i.e. a matrix that, * when left- or right-multiplied with this one, produces the * identity matrix. * * Use this function to produce a transformation matrix that will * undo a given set of transformations. * * If this matrix is singular, then an inverse does not exist, * so the original matrix will be returned. * * Note: this implementation doesn't do anything fancy. * It hard-codes the arithmetic for each new matrix value. 
* * @return the inverse matrix */ def invert: Matrix4 = { val newData = Array.ofDim[Float](16) newData(0) = data(5) * data(10) * data(15) - data(5) * data(11) * data(14) - data(9) * data(6) * data(15) + data(9) * data(7) * data(14) + data(13) * data(6) * data(11) - data(13) * data(7) * data(10) newData(4) = -data(4) * data(10) * data(15) + data(4) * data(11) * data(14) + data(8) * data(6) * data(15) - data(8) * data(7) * data(14) - data(12) * data(6) * data(11) + data(12) * data(7) * data(10) newData(8) = data(4) * data(9) * data(15) - data(4) * data(11) * data(13) - data(8) * data(5) * data(15) + data(8) * data(7) * data(13) + data(12) * data(5) * data(11) - data(12) * data(7) * data(9) newData(12) = -data(4) * data(9) * data(14) + data(4) * data(10) * data(13) + data(8) * data(5) * data(14) - data(8) * data(6) * data(13) - data(12) * data(5) * data(10) + data(12) * data(6) * data(9) newData(1) = -data(1) * data(10) * data(15) + data(1) * data(11) * data(14) + data(9) * data(2) * data(15) - data(9) * data(3) * data(14) - data(13) * data(2) * data(11) + data(13) * data(3) * data(10) newData(5) = data(0) * data(10) * data(15) - data(0) * data(11) * data(14) - data(8) * data(2) * data(15) + data(8) * data(3) * data(14) + data(12) * data(2) * data(11) - data(12) * data(3) * data(10) newData(9) = -data(0) * data(9) * data(15) + data(0) * data(11) * data(13) + data(8) * data(1) * data(15) - data(8) * data(3) * data(13) - data(12) * data(1) * data(11) + data(12) * data(3) * data(9) newData(13) = data(0) * data(9) * data(14) - data(0) * data(10) * data(13) - data(8) * data(1) * data(14) + data(8) * data(2) * data(13) + data(12) * data(1) * data(10) - data(12) * data(2) * data(9) newData(2) = data(1) * data(6) * data(15) - data(1) * data(7) * data(14) - data(5) * data(2) * data(15) + data(5) * data(3) * data(14) + data(13) * data(2) * data(7) - data(13) * data(3) * data(6) newData(6) = -data(0) * data(6) * data(15) + data(0) * data(7) * data(14) + data(4) * data(2) * data(15) - data(4) * data(3) * data(14) - data(12) * data(2) * data(7) + data(12) * data(3) * data(6) newData(10) = data(0) * data(5) * data(15) - data(0) * data(7) * data(13) - data(4) * data(1) * data(15) + data(4) * data(3) * data(13) + data(12) * data(1) * data(7) - data(12) * data(3) * data(5) newData(14) = -data(0) * data(5) * data(14) + data(0) * data(6) * data(13) + data(4) * data(1) * data(14) - data(4) * data(2) * data(13) - data(12) * data(1) * data(6) + data(12) * data(2) * data(5) newData(3) = -data(1) * data(6) * data(11) + data(1) * data(7) * data(10) + data(5) * data(2) * data(11) - data(5) * data(3) * data(10) - data(9) * data(2) * data(7) + data(9) * data(3) * data(6) newData(7) = data(0) * data(6) * data(11) - data(0) * data(7) * data(10) - data(4) * data(2) * data(11) + data(4) * data(3) * data(10) + data(8) * data(2) * data(7) - data(8) * data(3) * data(6) newData(11) = -data(0) * data(5) * data(11) + data(0) * data(7) * data(9) + data(4) * data(1) * data(11) - data(4) * data(3) * data(9) - data(8) * data(1) * data(7) + data(8) * data(3) * data(5) newData(15) = data(0) * data(5) * data(10) - data(0) * data(6) * data(9) - data(4) * data(1) * data(10) + data(4) * data(2) * data(9) + data(8) * data(1) * data(6) - data(8) * data(2) * data(5) val det = data(0) * newData(0) + data(1) * newData(4) + data(2) * newData(8) + data(3) * newData(12) if (det == 0) { this } else { for (i <- 0 until 16) { newData(i) /= det } new Matrix4(newData) } } /** * Generates the transpose matrix, that is, the matrix whose rows are * 
composed of this matrix's columns, and whose columns are composed * of this matrix's rows. * @return the transpose matrix */ def transpose: Matrix4 = { val newData = Array.ofDim[Float](16) for (i <- 0 until 4) { for (j <- 0 until 4) { newData(idx(i, j)) = data(idx(j, i)) } } new Matrix4(newData) } /** * Produces a new transformation matrix that does all of the previous transformations, * and then does a rotation; positive values rotate counter-clockwise. * In other words, a rotation matrix is left-multiplied to the current matrix. * @param rads the Euler angles of the rotation; rotation is performed in XYZ order * @return the new transformation matrix */ def rotate(rads: Vector3) = new Matrix4(mult(Matrix4.rotate(rads).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then does a rotation about the positive X-axis; positive values rotate counter-clockwise. * In other words, a rotation matrix is left-multiplied to the current matrix. * @param rads the radians to rotate counter-clockwise along the +X-axis * @return the new transformation matrix */ def rotateX(rads: Float) = new Matrix4(mult(Matrix4.rotateX(rads).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then does a rotation about the positive Y-axis; positive values rotate counter-clockwise. * In other words, a rotation matrix is left-multiplied to the current matrix. * @param rads the radians to rotate counter-clockwise along the +Y-axis * @return the new transformation matrix */ def rotateY(rads: Float) = new Matrix4(mult(Matrix4.rotateY(rads).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then does a rotation about the positive Z-axis; positive values rotate counter-clockwise. * In other words, a rotation matrix is left-multiplied to the current matrix. * @param rads the radians to rotate counter-clockwise along the +Z-axis * @return the new transformation matrix */ def rotateZ(rads: Float) = new Matrix4(mult(Matrix4.rotateZ(rads).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then looks in the specified direction. * In other words, a look-at matrix is left-multiplied to the current matrix. * @param dir the direction to look at, relative to the current translation * @return the new transformation matrix */ def lookAt(dir: Vector3) = new Matrix4(mult(Matrix4.lookAt(dir).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then translates in the specified direction. * In other words, a translation matrix is left-multiplied to the current matrix. * @param t the direction to translate in * @return the new transformation matrix */ def translate(t: Vector3) = new Matrix4(mult(Matrix4.translate(t).data, data)) /** * Produces a new transformation matrix that does all of the previous transformations, * and then applies the specified scaling. * In other words, a scaling matrix is left-multiplied to the current matrix. * @param s the amount to scale along each axis * @return the new transformation matrix */ def scale(s: Vector3) = new Matrix4(mult(Matrix4.scale(s).data, data)) /** * Gets the value of the matrix at the specified row and column. * @param row the row of the desired value * @param col the column of the desired value * @return the desired value */ def apply(row: Int, col: Int) = data(idx(row, col)) /** * Gets the value of the parameter at the given time. * @return the value of the parameter */ def apply() = this override def toString: String = { val m00 = apply(0, 0) val m01 = apply(0, 1) val m02 = apply(0, 2) val m03 = apply(0, 3) val m10 = apply(1, 0) val m11 = apply(1, 1) val m12 = apply(1, 2) val m13 = apply(1, 3) val m20 = apply(2, 0) val m21 = apply(2, 1) val m22 = apply(2, 2) val m23 = apply(2, 3) val m30 = apply(3, 0) val m31 = apply(3, 1) val m32 = apply(3, 2) val m33 = apply(3, 3) f"Matrix4( \\n$m00%8.3f $m01%8.3f $m02%8.3f $m03%8.3f \\n$m10%8.3f $m11%8.3f $m12%8.3f $m13%8.3f \\n$m20%8.3f $m21%8.3f $m22%8.3f $m23%8.3f \\n$m30%8.3f $m31%8.3f $m32%8.3f $m33%8.3f \\n)" } }
object Matrix4 { /** * The identity matrix. When any matrix M is right- or left-multiplied * by the identity, M is returned. */ val IDENTITY = new Matrix4(Array(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)) /** * The matrix composed of all zero values. When any matrix M is right- or * left-multiplied by the zero matrix, the zero matrix is returned. */ val ZERO = new Matrix4(Array(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f)) /** * Performs a counter-clockwise rotation on the positive x-axis, * i.e. the rotation would appear to be counter-clockwise if looking in * the direction of the negative x-axis. * * @param rads the angle to rotate * @return the rotation matrix */ def rotateX(rads: Float) = { val c = cos(rads).toFloat val s = sin(rads).toFloat new Matrix4(Array(1.0f, 0.0f, 0.0f, 0.0f, 0.0f, c, -s, 0.0f, 0.0f, s, c, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)) } /** * Performs a counter-clockwise rotation on the positive y-axis, * i.e. the rotation would appear to be counter-clockwise if looking in * the direction of the negative y-axis. * * @param rads the angle to rotate * @return the rotation matrix */ def rotateY(rads: Float) = { val c = cos(rads).toFloat val s = sin(rads).toFloat new Matrix4(Array(c, 0.0f, s, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, -s, 0.0f, c, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)) } /** * Performs a counter-clockwise rotation on the positive z-axis, * i.e. the rotation would appear to be counter-clockwise if looking in * the direction of the negative z-axis. * * @param rads the angle to rotate * @return the rotation matrix */ def rotateZ(rads: Float) = { val c = cos(rads).toFloat val s = sin(rads).toFloat new Matrix4(Array(c, -s, 0.0f, 0.0f, s, c, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)) } /** * Generates a matrix for an Euler rotation along the * X, Y, then Z-axes, in that order. * @param rads a vector containing the rotation angles, in radians * @return the rotation matrix */ def rotate(rads: Vector3) = { rotateZ(rads.z) * rotateY(rads.y) * rotateX(rads.x) } /** * Generates a matrix for the rotation caused by looking * towards the specified direction from the origin. * * See [[http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle this Wikipedia article]] * for the source of the equations. 
* * @param dir the direction to look at * @return the rotation matrix */ def lookAt(dir: Vector3) = { val normalDir = dir.normalize val axis = (Vector3.NegativeK cross normalDir).normalize val c = Vector3.NegativeK dot normalDir val s = sqrt(1 - c * c).toFloat val t = 1 - c val data = Array.ofDim[Float](16) data(0) = t * axis.x * axis.x + c data(1) = t * axis.x * axis.y - axis.z * s data(2) = t * axis.x * axis.z + axis.y * s data(4) = t * axis.x * axis.y + axis.z * s data(5) = t * axis.y * axis.y + c data(6) = t * axis.y * axis.z - axis.x * s data(8) = t * axis.x * axis.z - axis.y * s data(9) = t * axis.y * axis.z + axis.x * s data(10) = t * axis.z * axis.z + c data(15) = 1.0f new Matrix4(data) } /** * Generates a matrix for translation by the specified distances. * @param amount the amount to translate along each axis * @return the translation matrix */ def translate(amount: Vector3) = { new Matrix4(Array(1.0f, 0.0f, 0.0f, amount.x, 0.0f, 1.0f, 0.0f, amount.y, 0.0f, 0.0f, 1.0f, amount.z, 0.0f, 0.0f, 0.0f, 1.0f)) } /** * Generates a matrix for scaling with the specified factors. * @param factor the factor to scale by along each axis * @return the scaling matrix */ def scale(factor: Vector3) = { new Matrix4(Array(factor.x, 0.0f, 0.0f, 0.0f, 0.0f, factor.y, 0.0f, 0.0f, 0.0f, 0.0f, factor.z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f)) } }
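// A hedged usage sketch (an editor's addition, not part of the original file): it shows
// how the fluent builders above compose and how invert undoes the chain. It assumes the
// Vector3 companion offers the three-float factory already used by `*` above, and that
// this object sits in the same package so no extra imports are needed.
object Matrix4Demo {
  def main(args: Array[String]): Unit = {
    // Each call left-multiplies a new transform: scale first, then rotate, then translate.
    val transform = Matrix4.IDENTITY
      .scale(Vector3(2f, 2f, 2f))
      .rotateY((math.Pi / 2).toFloat)
      .translate(Vector3(0f, 1f, 0f))

    val p = transform * Vector3(1f, 0f, 0f)

    // The inverse transform maps p back to (approximately) the original point.
    val back = transform.invert * p
    println(p)
    println(back)
  }
}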
sdao/hugo-reyes
src/main/edu/utexas/cs/sdao/reyes/core/Matrix4.scala
Scala
mit
15,699
package org.cvogt.slick_mongo_light import scala.language.implicitConversions import org.cvogt.slick_mongo_light.expressions._ import org.cvogt.slick_mongo_light.dialect.Dialect class Evaluator[D <: Dialect](val js: D){ evaluator => def splitFields(field: String) = field.split("\\\\.").reverse.toList def nestFields(field: String)(applyLast: String => js.Value) = { val fields = splitFields(field) applyNestedFields( fields.tail, applyLast(fields.head) ) } def applyNestedFields(fields: List[String], ex: js.Value): js.Value = fields match { case Nil => ex case f :: tail => applyNestedFields( tail, js.object_( f -> ex ) ) } class ExpressionExtensions(val ex: Expression){ def toJson: js.Value = evaluator.toJson(ex).asInstanceOf[js.Value] } def anyToJson(any: Any) = any match{ case v: Expression => toJson(v) case other => js.scalar(other) } def toJson(ex: Expression): js.Value = { implicit def ExpressionExtensions(ex: Expression) = new ExpressionExtensions(ex) ex match{ case Scalar(c) => try{ js.scalar(c) } catch { case e:MatchError => throw new Exception("Type not supported by chosen dialect: "+c.getClass.getName,e) } case EmbeddedJson(json) => json.asInstanceOf[js.Value] case Sequence(s: Seq[_]) => js.array(s.map(anyToJson) :_*) case Object(pairs@_*) => js.object_(pairs.toMap.mapValues(anyToJson).toSeq :_*) case With(Field(field), ex) => applyNestedFields( splitFields(field), ex.toJson ) //case UnaryOperator(_,op,expr) => js.object_( op -> expr ) case PrefixOperator(_,op,left,right) => js.object_( op -> js.array( left.toJson, right.toJson ) ) case InfixOperator(_,op,Field(field),right) => nestFields(field){ f => js.object_( f -> js.object_( op -> right.toJson ) ) } case InfixOperator(sop,op,right,Field(field)) => InfixOperator(sop,op,Field(field),right).toJson case InfixOperator(scalaName,_,_,_) => throw new Exception("Can't use "+scalaName+""" on two m"..."-fields. One needs to be a constant.""") case Field(field) => js.object_( field -> js.scalar(true) ) } } }
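// A hedged sketch (an editor's addition, not part of the original file): the field-nesting
// logic is the interesting part of the evaluator above, so this standalone version replays
// splitFields/applyNestedFields with plain Maps standing in for the dialect's JSON AST,
// showing how a dotted path becomes nested objects.
object NestFieldsDemo {
  def splitFields(field: String): List[String] = field.split("\\.").reverse.toList

  def applyNestedFields(fields: List[String], ex: Any): Any = fields match {
    case Nil       => ex
    case f :: tail => applyNestedFields(tail, Map(f -> ex))
  }

  def main(args: Array[String]): Unit = {
    val fields = splitFields("a.b.c") // List(c, b, a)
    // Equivalent of the evaluator's nestFields("a.b.c")(...):
    println(applyNestedFields(fields.tail, Map(fields.head -> true)))
    // Map(a -> Map(b -> Map(c -> true))), i.e. {"a":{"b":{"c":true}}}
  }
}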
cvogt/slick-mongo-light
src/main/scala/Evaluator.scala
Scala
bsd-2-clause
2,234
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package xerial.silk.macros import scala.language.existentials trait Ref { def fullName: String def shortName: String } /** * Used for assigning user-defined names to a task. * @param id */ case class NamedRef(id: String) extends Ref { def fullName = id def shortName = id } /** * Tells where this task is defined in the source code. * @param owner * @param name * @param source * @param line * @param column */ case class SourceRef(owner: Class[_], name: String, source: String, line: Int, column: Int) extends Ref { def baseTrait: Class[_] = { // If the class name contains $anonfun, it is a compiler generated class. // If it contains $anon, it is a mixed-in trait val isAnonFun = owner.getSimpleName.contains("$anon") if (!isAnonFun) { owner } else { // If the owner is a mix-in class owner.getInterfaces .headOption.orElse(Option(owner.getSuperclass)) .getOrElse(owner) } } private def format(op: Option[String]) : String = op.map(_.toString).getOrElse("") def fullName = { // className is computed but not currently used in the returned name val className = baseTrait.getSimpleName.replaceAll("\\\\$", "") s"${name}" } def shortName = { Option(name.split("\\\\.")).map(a => a(a.length - 1)).getOrElse(name) } override def toString = { s"${fullName} [L$line:$column]" } }
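// A hedged usage sketch (an editor's addition, not part of the original file): how the two
// Ref flavors render. The task names and positions below are made up for illustration.
object RefDemo {
  def main(args: Array[String]): Unit = {
    val named = NamedRef("wordCount")
    println(named.fullName)  // wordCount
    println(named.shortName) // wordCount

    val src = SourceRef(classOf[String], "demo.task", "Demo.scala", 10, 3)
    println(src.fullName)    // demo.task
    println(src.shortName)   // task
    println(src)             // demo.task [L10:3]
  }
}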
xerial/silk
silk-macros/src/main/scala/xerial/silk/macro/Ref.scala
Scala
apache-2.0
1,969
package poly.collection.mut import poly.collection._ /** * Represents a mutable priority queue. * @since 0.1.0 * @author Tongfei Chen */ trait PriorityQueue[T] extends Queue[T] { /** * Returns an iterable collection of the elements in this priority queue. * @note The elements are not guaranteed to be sorted by the order imposed on the priority queue. */ def elements: Iterable[T] /** Retrieves the order under which the elements are prioritized. */ def elementOrder: Order[T] }
ctongfei/poly-collection
core/src/main/scala/poly/collection/mut/PriorityQueue.scala
Scala
mit
506
package io.github.rollenholt.application.center.base.model import java.util.Date import scala.beans.BeanProperty /** * @author rollenholt */ @SerialVersionUID(1L) class OperationLog() extends Serializable { @BeanProperty var id: Int = 0 @BeanProperty var applicationCode: String = _ @BeanProperty var operator: String = _ @BeanProperty var operateTime: Date = _ @BeanProperty var detail: String = _ override def toString = s"OperationLog(id=$id, applicationCode=$applicationCode," + s" operator=$operator, operateTime=$operateTime, detail=$detail)" }
rollenholt/application-center
src/main/scala/io/github/rollenholt/application/center/base/model/OperationLog.scala
Scala
gpl-2.0
577
package almond import almond.TestUtil.SessionRunner import almond.amm.AlmondPreprocessor import almond.kernel.KernelThreads import almond.util.SequentialExecutionContext import almond.util.ThreadUtil.{attemptShutdownExecutionContext, singleThreadedExecutionContext} import utest._ object EvaluatorTests extends TestSuite { val interpreterEc = singleThreadedExecutionContext("test-interpreter") val bgVarEc = new SequentialExecutionContext val threads = KernelThreads.create("test") override def utestAfterAll() = { threads.attemptShutdown() if (!attemptShutdownExecutionContext(interpreterEc)) println(s"Don't know how to shutdown $interpreterEc") } val runner = new SessionRunner(interpreterEc, bgVarEc, threads) def ifVarUpdates(s: String): String = if (AlmondPreprocessor.isAtLeast_2_12_7) s else "" def ifNotVarUpdates(s: String): String = if (AlmondPreprocessor.isAtLeast_2_12_7) "" else s val tests = Tests { "from Ammonite" - { // These sessions were copy-pasted from ammonite.session.EvaluatorTests // Running them here to test our custom preprocessor. "multistatement" - { val sv = scala.util.Properties.versionNumberString val isScala212 = sv.startsWith("2.12.") runner.run( Seq( ";1; 2L; '3';" -> """res0_0: Int = 1 |res0_1: Long = 2L |res0_2: Char = '3'""".stripMargin, "val x = 1; x;" -> """x: Int = 1 |res1_1: Int = 1""".stripMargin, "var x = 1; x = 2; x" -> """x: Int = 2 |res2_2: Int = 2""".stripMargin, "var y = 1; case class C(i: Int = 0){ def foo = x + y }; new C().foo" -> """y: Int = 1 |defined class C |res3_2: Int = 3""".stripMargin, "C()" -> (if (isScala212) "res4: C = C(0)" else "res4: C = C(i = 0)") ) ) } "lazy vals" - { runner.run( Seq( "lazy val x = 'h'" -> "", "x" -> "res1: Char = 'h'", "var w = 'l'" -> ifNotVarUpdates("w: Char = 'l'"), "lazy val y = {w = 'a'; 'A'}" -> "", "lazy val z = {w = 'b'; 'B'}" -> "", "z" -> "res5: Char = 'B'", "y" -> "res6: Char = 'A'", "w" -> "res7: Char = 'a'" ), Seq( "x: Char = [lazy]", "x: Char = 'h'", ifVarUpdates("w: Char = 'l'"), "y: Char = [lazy]", "z: Char = [lazy]", ifVarUpdates("w: Char = 'b'"), "z: Char = 'B'", ifVarUpdates("w: Char = 'a'"), "y: Char = 'A'" ).filter(_.nonEmpty) ) } "vars" - { runner.run( Seq( "var x: Int = 10" -> ifNotVarUpdates("x: Int = 10"), "x" -> "res1: Int = 10", "x = 1" -> "", "x" -> "res3: Int = 1" ), Seq( ifVarUpdates("x: Int = 10"), ifVarUpdates("x: Int = 1") ).filter(_.nonEmpty) ) } } "type annotation" - { if (AlmondPreprocessor.isAtLeast_2_12_7) runner.run( Seq( "var x: Any = 2" -> "", "x = 'a'" -> "" ), Seq( "x: Any = 2", ifVarUpdates("x: Any = 'a'") ) ) } "pattern match still compile" - { // no updates for var-s defined via pattern matching runner.run( Seq( "var (a, b) = (1, 'a')" -> """a: Int = 1 |b: Char = 'a'""".stripMargin, "a = 2" -> "", "b = 'c'" -> "" ) ) } } }
alexarchambault/jupyter-scala
modules/scala/scala-interpreter/src/test/scala/almond/EvaluatorTests.scala
Scala
apache-2.0
3,781
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.index.geoserver import java.util.Collections import org.geotools.data.Query import org.geotools.util.factory.Hints import org.geotools.geometry.jts.ReferencedEnvelope import org.junit.runner.RunWith import org.locationtech.geomesa.index.planning.QueryPlanner.CostEvaluation import org.locationtech.geomesa.utils.geotools.CRS_EPSG_4326 import org.specs2.matcher.MatchResult import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class ViewParamsTest extends Specification { import org.locationtech.geomesa.index.conf.QueryHints._ "ViewParams" should { "handle all types of query hints" in { def testHint(hint: Hints.Key, name: String, param: String, expected: Any): MatchResult[Any] = { val query = new Query() query.getHints.put(Hints.VIRTUAL_TABLE_PARAMETERS, Collections.singletonMap(name, param)) ViewParams.setHints(query) query.getHints.get(hint) mustEqual expected } testHint(QUERY_INDEX, "QUERY_INDEX", "index-test", "index-test") testHint(BIN_TRACK, "BIN_TRACK", "track", "track") testHint(COST_EVALUATION, "COST_EVALUATION", "stats", CostEvaluation.Stats) testHint(DENSITY_BBOX, "DENSITY_BBOX", "[-120.0, -45, 10, -35.01]", new ReferencedEnvelope(-120d, 10d, -45d, -35.01d, CRS_EPSG_4326)) testHint(ENCODE_STATS, "ENCODE_STATS", "true", true) testHint(ENCODE_STATS, "ENCODE_STATS", "false", false) testHint(DENSITY_WIDTH, "DENSITY_WIDTH", "640", 640) testHint(SAMPLING, "SAMPLING", "0.4", 0.4f) } } }
elahrvivaz/geomesa
geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/geoserver/ViewParamsTest.scala
Scala
apache-2.0
2,073
package breeze.linalg import breeze.generic.UFunc import scala.reflect.ClassTag import spire.implicits._ import breeze.storage.Zero /** * Splits a vector or matrix into equally sized chunks, or (for vectors) at the given indices. * * @author stucchio */ object split extends UFunc { implicit def implIntVec[T: ClassTag]: Impl2[DenseVector[T], Int, IndexedSeq[DenseVector[T]]] = { new Impl2[DenseVector[T], Int, IndexedSeq[DenseVector[T]]] { def apply(v: DenseVector[T], n: Int): IndexedSeq[DenseVector[T]] = { require(n >= 0) require(n <= v.size) require(v.size % n == 0) val individualVectorSize = v.size / n val result = new collection.mutable.ArrayBuffer[DenseVector[T]]() cforRange(0 until n) { k => val offsetInOriginalVector = k*individualVectorSize val chunk = new Array[T](individualVectorSize) cforRange(0 until individualVectorSize){i => chunk(i) = v(offsetInOriginalVector+i) } result += DenseVector[T](chunk) } result } } } implicit def implSeqVec[T: ClassTag]: Impl2[DenseVector[T], Seq[Int], IndexedSeq[DenseVector[T]]] = new Impl2[DenseVector[T], Seq[Int], IndexedSeq[DenseVector[T]]] { def apply(v: DenseVector[T], nSeq: Seq[Int]): IndexedSeq[DenseVector[T]] = { require(nSeq.size < v.size) val result = new collection.mutable.ArrayBuffer[DenseVector[T]]() var lastN: Int = 0 nSeq.foreach{ n => val chunk = new Array[T](n - lastN) cforRange(lastN until n) { i => chunk(i-lastN) = v(i) } result += DenseVector[T](chunk) lastN = n } if (lastN < v.size) { //If we did not already add last chunk to result, do it now. val chunk = new Array[T](v.size - lastN) cforRange(lastN until v.size) { i => chunk(i-lastN) = v(i) } result += DenseVector[T](chunk) } result } } implicit def implIntMatrix[T: ClassTag](implicit zero: Zero[T]): Impl3[DenseMatrix[T], Int, Int, IndexedSeq[DenseMatrix[T]]] = new Impl3[DenseMatrix[T], Int, Int, IndexedSeq[DenseMatrix[T]]] { def apply(v: DenseMatrix[T], n: Int, axis: Int): IndexedSeq[DenseMatrix[T]] = axis match { case 0 => vsplit(v,n) case 1 => hsplit(v,n) case _ => throw new IllegalArgumentException("Matrices have only two axes.") } } } object hsplit extends UFunc { implicit def implIntVec[T: ClassTag]: Impl2[DenseVector[T], Int, IndexedSeq[DenseVector[T]]] = new Impl2[DenseVector[T], Int, IndexedSeq[DenseVector[T]]] { //For vectors just an alias for split (a self-call to hsplit here would recurse forever) def apply(v: DenseVector[T], n: Int): IndexedSeq[DenseVector[T]] = split(v, n) } implicit def implSeqVec[T: ClassTag]: Impl2[DenseVector[T], Seq[Int], IndexedSeq[DenseVector[T]]] = new Impl2[DenseVector[T], Seq[Int], IndexedSeq[DenseVector[T]]] { //For vectors just an alias for split (a self-call to hsplit here would recurse forever) def apply(v: DenseVector[T], n: Seq[Int]): IndexedSeq[DenseVector[T]] = split(v, n) } implicit def implIntMat[T: ClassTag](implicit zero: Zero[T]): Impl2[DenseMatrix[T], Int, IndexedSeq[DenseMatrix[T]]] = new Impl2[DenseMatrix[T],Int, IndexedSeq[DenseMatrix[T]]] { //for matrices def apply(v: DenseMatrix[T], n: Int): IndexedSeq[DenseMatrix[T]] = { require(n >= 0) require(n <= v.cols) require(v.cols % n == 0) val result = new collection.mutable.ArrayBuffer[DenseMatrix[T]]() val newCols = v.cols / n val newSize = v.rows * newCols cforRange(0 until n) { k => val offsetInOriginalMatrix = k*newCols val chunk = DenseMatrix.create(v.rows, newCols, new Array[T](newSize)) cforRange2(0 until v.rows, 0 until newCols) { (i, j) => chunk(i,j) = v(i,j+offsetInOriginalMatrix) } result += chunk } result } } } object vsplit extends UFunc { implicit def implIntMat[T: ClassTag](implicit zero: Zero[T]): Impl2[DenseMatrix[T], Int, IndexedSeq[DenseMatrix[T]]] = new Impl2[DenseMatrix[T],Int, IndexedSeq[DenseMatrix[T]]] { //for matrices
def apply(v: DenseMatrix[T], n: Int): IndexedSeq[DenseMatrix[T]] = { require(n >= 0) require(n <= v.rows) require(v.rows % n == 0) val result = new collection.mutable.ArrayBuffer[DenseMatrix[T]]() val newRows = v.rows / n cforRange(0 until n) { k => val offsetInOriginalMatrix = k*newRows val chunk = DenseMatrix.create(newRows, v.cols, new Array[T](v.cols * newRows)) cforRange2(0 until newRows, 0 until v.cols) { (i, j) => chunk(i,j) = v(i+offsetInOriginalMatrix,j) } result += chunk } result } } }
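// A hedged usage sketch (an editor's addition, not part of the original file), assuming it
// sits in the same breeze.linalg package so DenseVector and DenseMatrix are in scope:
object SplitDemo {
  def main(args: Array[String]): Unit = {
    val v = DenseVector(1, 2, 3, 4, 5, 6)
    println(split(v, 3))         // three chunks of two: [1,2] [3,4] [5,6]
    println(split(v, Seq(2, 5))) // split before indices 2 and 5: [1,2] [3,4,5] [6]

    val m = DenseMatrix((1, 2), (3, 4))
    println(hsplit(m, 2))        // two 2x1 column blocks
    println(vsplit(m, 2))        // two 1x2 row blocks
  }
}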
claydonkey/breeze
math/src/main/scala/breeze/linalg/functions/split.scala
Scala
apache-2.0
4,623
import sbt._ import Keys._ import org.scalatra.sbt._ import org.scalatra.sbt.PluginKeys._ import com.earldouglas.xwp.JettyPlugin import com.mojolly.scalate.ScalatePlugin._ import com.typesafe.sbt.packager.archetypes.JavaAppPackaging import ScalateKeys._ object Quest_onBuild extends Build { val Organization = "com.leoschwarz" val Name = "quest_on" val Version = "2.0.0-SNAPSHOT" val ScalaVersion = "2.11.8" val ScalatraVersion = "2.4.1" lazy val project = Project ( "quest_on", file("."), settings = ScalatraPlugin.scalatraSettings ++ scalateSettings ++ Seq( organization := Organization, name := Name, version := Version, scalaVersion := ScalaVersion, resolvers += Classpaths.typesafeReleases, resolvers += "Scalaz Bintray Repo" at "http://dl.bintray.com/scalaz/releases", libraryDependencies ++= Seq( "org.scalatra" %% "scalatra" % ScalatraVersion, "org.scalatra" %% "scalatra-json" % ScalatraVersion, "org.scalatra" %% "scalatra-scalate" % ScalatraVersion, "org.scalatra" %% "scalatra-auth" % ScalatraVersion, "org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test", "ch.qos.logback" % "logback-classic" % "1.1.5" % "runtime", "javax.servlet" % "javax.servlet-api" % "3.1.0" % "provided", "org.json4s" %% "json4s-jackson" % "3.4.0", // Jetty Server "org.eclipse.jetty" % "jetty-webapp" % "9.2.15.v20160210" % "compile;container", "org.eclipse.jetty" % "jetty-plus" % "9.2.15.v20160210" % "compile;container", // YAML "org.yaml" % "snakeyaml" % "1.17", // Database "org.postgresql" % "postgresql" % "9.4.1209", "org.xerial" % "sqlite-jdbc" % "3.8.11.2", // Excel format support "org.apache.poi" % "poi" % "3.14", "org.apache.poi" % "poi-ooxml" % "3.14" ), scalateTemplateConfig in Compile <<= (sourceDirectory in Compile){ base => Seq( TemplateConfig( base / "webapp" / "WEB-INF" / "templates", Seq.empty, /* default imports should be added here */ Seq( Binding("context", "_root_.org.scalatra.scalate.ScalatraRenderContext", importMembers = true, isImplicit = true) ), /* add extra bindings here */ Some("templates") ) ) } ) ).enablePlugins(JettyPlugin, JavaAppPackaging) }
evotopid/quest_on
project/build.scala
Scala
apache-2.0
2,447
package org.jetbrains.plugins.scala package lang package psi package types import com.intellij.psi._ import com.intellij.psi.impl.PsiSubstitutorImpl import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.api.statements._ import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject, ScTypeDefinition} import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticClass import org.jetbrains.plugins.scala.lang.psi.light.PsiClassWrapper import org.jetbrains.plugins.scala.lang.psi.types.api._ import org.jetbrains.plugins.scala.lang.psi.types.api.designator.{ScDesignatorType, ScProjectionType, ScThisType} import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.NonValueType import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor import scala.collection.JavaConverters._ trait ScalaPsiTypeBridge extends api.PsiTypeBridge { typeSystem: api.TypeSystem => override def toScType(psiType: PsiType, treatJavaObjectAsAny: Boolean) (implicit visitedRawTypes: Set[PsiClass], paramTopLevel: Boolean): ScType = psiType match { case classType: PsiClassType => val result = classType.resolveGenerics result.getElement match { case null => Nothing case psiTypeParameter: PsiTypeParameter => TypeParameterType(psiTypeParameter) case clazz if clazz.qualifiedName == "java.lang.Object" => if (paramTopLevel && treatJavaObjectAsAny) Any else AnyRef case c => val clazz = c match { case o: ScObject => ScalaPsiUtil.getCompanionModule(o).getOrElse(o) case _ => c } if (classType.isRaw && visitedRawTypes.contains(clazz)) return Any val substitutor = result.getSubstitutor def upper(tp: PsiTypeParameter) = { def mapper = substitutor.substitute(_: PsiType).toScType(visitedRawTypes + clazz) tp.getExtendsListTypes ++ tp.getImplementsListTypes match { case Array() => None case Array(head) => Some(mapper(head)) case components => Some(ScCompoundType(components.map(mapper))) } } def convertTypeParameter(typeParameter: PsiType, tp: PsiTypeParameter, index: Int): ScType = typeParameter match { case wildcardType: PsiWildcardType => val (maybeLower, maybeUpper) = bounds(wildcardType, paramTopLevel = true) createParameter( maybeLower, maybeUpper.orElse(if (visitedRawTypes(clazz)) None else upper(tp)), index ) case wildcardType: PsiCapturedWildcardType => convertTypeParameter(wildcardType.getWildcard, tp, index) case _ => typeParameter.toScType(visitedRawTypes) } val scSubst = substitutor match { case impl: PsiSubstitutorImpl => val entries = impl.getSubstitutionMap.entrySet().asScala.toSeq val psiParams = entries.map(_.getKey) val scTypes = entries.map(e => e.getValue.toScType(visitedRawTypes)) ScSubstitutor.bind(psiParams, scTypes) case _ => ScSubstitutor.empty } val designator = constructTypeForClass(clazz, scSubst) clazz.getTypeParameters match { case Array() => designator case typeParameters => val typeArgs = typeParameters.zipWithIndex.map { case (tp, index) if classType.isRaw => createParameter(None, upper(tp), index) case (tp, index) => substitutor.substitute(tp) match { case null => TypeParameterType(tp) case substituted => convertTypeParameter(substituted, tp, index) } } ScParameterizedType(designator, typeArgs).unpackedType } } case wildcardType: PsiWildcardType => val (maybeLower, maybeUpper) = bounds(wildcardType, paramTopLevel = false) ScExistentialType(createParameter(maybeLower, maybeUpper)) case _: PsiDisjunctionType => Any case _ => super.toScType(psiType, treatJavaObjectAsAny) } private def createParameter(maybeLower: 
Option[ScType], maybeUpper: Option[ScType], index: Int = 0) = ScExistentialArgument( s"_$$${index + 1}", Nil, maybeLower.getOrElse(Nothing), maybeUpper.getOrElse(Any) ) private def bounds(wildcardType: PsiWildcardType, paramTopLevel: Boolean) (implicit visitedRawTypes: Set[PsiClass]): (Option[ScType], Option[ScType]) = { def bound(collector: PartialFunction[PsiWildcardType, PsiType]) = Some(wildcardType) .collect(collector) .map(_.toScType(visitedRawTypes, paramTopLevel = paramTopLevel)) val maybeLower = bound { case t if t.isSuper => t.getSuperBound } val maybeUpper = bound { case t if t.isExtends => t.getExtendsBound } (maybeLower, maybeUpper) } private def constructTypeForClass(clazz: PsiClass, subst: ScSubstitutor, withTypeParameters: Boolean = false): ScType = clazz match { case PsiClassWrapper(definition) => constructTypeForClass(definition, subst) case _ => val designator = Option(clazz.containingClass) map { constructTypeForClass(_, subst, withTypeParameters = !clazz.hasModifierProperty("static")) } map { ScProjectionType(_, clazz) } getOrElse ScDesignatorType(clazz) subst.subst(if (withTypeParameters) { clazz.getTypeParameters.toSeq map { TypeParameterType(_) } match { case Seq() => designator case parameters => ScParameterizedType(designator, parameters) } } else designator) } override def toPsiType(`type`: ScType, noPrimitives: Boolean): PsiType = toPsiTypeInner(`type`, noPrimitives) private def toPsiTypeInner(`type`: ScType, noPrimitives: Boolean = false, visitedAliases: Set[ScTypeAlias] = Set.empty): PsiType = { def outerClassHasTypeParameters(proj: ScProjectionType): Boolean = { proj.projected.extractClass match { case Some(outer) => outer.hasTypeParameters case _ => false } } val t = `type`.removeAliasDefinitions() if (t.isInstanceOf[NonValueType]) return toPsiTypeInner(t.inferValueType) def javaObject = createJavaObject val qualNameToType = projectContext.stdTypes.QualNameToType t match { case ScCompoundType(Seq(typez, _*), _, _) => toPsiTypeInner(typez) case ScDesignatorType(c: ScTypeDefinition) if qualNameToType.contains(c.qualifiedName) => toPsiTypeInner(qualNameToType(c.qualifiedName), noPrimitives) case ScDesignatorType(valClass: ScClass) if ValueClassType.isValueClass(valClass) => valClass.parameters.head.getRealParameterType match { case Right(tp) if !(noPrimitives && tp.isPrimitive) => toPsiTypeInner(tp, noPrimitives) case _ => createType(valClass) } case ScDesignatorType(c: PsiClass) => createType(c) case arrayType(arg) => new PsiArrayType(toPsiTypeInner(arg)) case ParameterizedType(ScDesignatorType(c: PsiClass), args) => val subst = args.zip(c.getTypeParameters).foldLeft(PsiSubstitutor.EMPTY) { case (s, (targ, tp)) => s.put(tp, toPsiTypeInner(targ, noPrimitives = true)) } createType(c, subst) case ParameterizedType(proj@ScProjectionType(_, _), args) => proj.actualElement match { case c: PsiClass => if (c.qualifiedName == "scala.Array" && args.length == 1) new PsiArrayType(toPsiTypeInner(args.head)) else { val subst = args.zip(c.getTypeParameters).foldLeft(PsiSubstitutor.EMPTY) { case (s, (targ, tp)) => s.put(tp, toPsiTypeInner(targ)) } createType(c, subst, raw = outerClassHasTypeParameters(proj)) } case a: ScTypeAliasDefinition if !visitedAliases.contains(a) => a.aliasedType match { case Right(c: ScParameterizedType) => toPsiTypeInner(ScParameterizedType(c.designator, args), noPrimitives, visitedAliases + a.physical) case _ => javaObject } case _ => javaObject } case ParameterizedType(TypeParameterType.ofPsi(typeParameter), _) => psiTypeOf(typeParameter) case 
proj@ScProjectionType(_, _) => proj.actualElement match { case clazz: PsiClass => clazz match { case syn: ScSyntheticClass => toPsiTypeInner(syn.stdType) case _ => createType(clazz, raw = outerClassHasTypeParameters(proj)) } case elem: ScTypeAliasDefinition if !visitedAliases.contains(elem) => elem.aliasedType.toOption .map(toPsiTypeInner(_, noPrimitives, visitedAliases + elem.physical)) .getOrElse(javaObject) case _ => javaObject } case ScThisType(clazz) => createType(clazz) case TypeParameterType.ofPsi(typeParameter) => psiTypeOf(typeParameter) case ex: ScExistentialType => toPsiTypeInner(ex.quantified, noPrimitives) case argument: ScExistentialArgument => val upper = argument.upper val manager: PsiManager = projectContext if (upper.equiv(Any)) { val lower = argument.lower if (lower.equiv(Nothing)) PsiWildcardType.createUnbounded(manager) else { val sup: PsiType = toPsiTypeInner(lower) if (sup.isInstanceOf[PsiWildcardType]) javaObject else PsiWildcardType.createSuper(manager, sup) } } else { val psi = toPsiTypeInner(upper) if (psi.isInstanceOf[PsiWildcardType]) javaObject else PsiWildcardType.createExtends(manager, psi) } case std: StdType => stdToPsiType(std, noPrimitives) case _ => javaObject } } private def psiTypeOf(typeParameter: PsiTypeParameter): PsiType = EmptySubstitutor.getInstance().substitute(typeParameter) }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/types/ScalaPsiTypeBridge.scala
Scala
apache-2.0
10,308
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.carbondata.spark.testsuite.createTable.TestCreateDDLForComplexMapType import java.io.{BufferedWriter, File, FileWriter} import java.util import au.com.bytecode.opencsv.CSVWriter import org.apache.hadoop.conf.Configuration import org.apache.spark.sql.{AnalysisException, Row} import org.apache.spark.sql.test.util.QueryTest import org.scalatest.BeforeAndAfterAll import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk import scala.collection.JavaConversions._ class TestCreateDDLForComplexMapType extends QueryTest with BeforeAndAfterAll { private val conf: Configuration = new Configuration(false) val rootPath = new File(this.getClass.getResource("/").getPath + "../../../..").getCanonicalPath val path = s"$rootPath/integration/spark-common-test/src/test/resources/maptest2.csv" private def checkForLocalDictionary(dimensionRawColumnChunks: util .List[DimensionRawColumnChunk]): Boolean = { var isLocalDictionaryGenerated = false import scala.collection.JavaConversions._ isLocalDictionaryGenerated = dimensionRawColumnChunks .filter(dimensionRawColumnChunk => dimensionRawColumnChunk.getDataChunkV3 .isSetLocal_dictionary).size > 0 isLocalDictionaryGenerated } def createCSVFile(): Unit = { val out = new BufferedWriter(new FileWriter(path)); val writer = new CSVWriter(out); val employee1 = Array("1\\u0002Nalla\\u00012\\u0002Singh\\u00011\\u0002Gupta\\u00014\\u0002Kumar") val employee2 = Array("10\\u0002Nallaa\\u000120\\u0002Sissngh\\u0001100\\u0002Gusspta\\u000140" + "\\u0002Kumar") var listOfRecords = List(employee1, employee2) writer.writeAll(listOfRecords) out.close() } override def beforeAll(): Unit = { createCSVFile() sql("DROP TABLE IF EXISTS carbon") } override def afterAll(): Unit = { new File(path).delete() } test("Single Map One Level") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() assert(desc(0).get(1).asInstanceOf[String].trim.equals("map<string,string>")) } test("Single Map with Two Nested Level") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,map<INT,STRING>> | ) | STORED BY |'carbondata' |""" .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() assert(desc(0).get(1).asInstanceOf[String].trim.equals("map<string,map<int,string>>")) } test("Map Type with array type as value") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,array<INT>> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() 
assert(desc(0).get(1).asInstanceOf[String].trim.equals("map<string,array<int>>")) } test("Map Type with struct type as value") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,struct<key:INT,val:INT>> | ) | STORED BY | 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() assert(desc(0).get(1).asInstanceOf[String].trim .equals("map<string,struct<key:int,val:int>>")) } test("Map Type as child to struct type") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField struct<key:INT,val:map<INT,INT>> | ) | STORED BY |'carbondata' """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() assert(desc(0).get(1).asInstanceOf[String].trim .equals("struct<key:int,val:map<int,int>>")) } test("Map Type as child to array type") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField array<map<INT,INT>> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon """.stripMargin).collect() assert(desc(0).get(1).asInstanceOf[String].trim.equals("array<map<int,int>>")) sql("insert into carbon values('1\\0032\\0022\\0033\\001100\\003200\\002200\\003300')") sql("select * from carbon").show(false) } test("Test Load data in map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() sql("insert into carbon values('1\\002Nalla\\0012\\002Singh\\0013\\002Gupta\\0014\\002Kumar')") checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 3 -> "Gupta", 4 -> "Kumar")))) } test("Test Load data in map with empty value") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() sql("insert into carbon values('1\\002Nalla\\0012\\002\\0013\\002Gupta\\0014\\002Kumar')") checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "", 3 -> "Gupta", 4 -> "Kumar")))) } // Global Dictionary for Map type test("Test Load data in map with dictionary include") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,STRING> | ) | STORED BY 'carbondata' | TBLPROPERTIES('DICTIONARY_INCLUDE'='mapField') | """ .stripMargin) sql("insert into carbon values('vi\\002Nalla\\001sh\\002Singh\\001al\\002Gupta')") sql("select * from carbon").show(false) checkAnswer(sql("select * from carbon"), Seq( Row(Map("vi" -> "Nalla", "sh" -> "Singh", "al" -> "Gupta")))) } test("Test Load data in map with partition columns") { sql("DROP TABLE IF EXISTS carbon") val exception = intercept[AnalysisException]( sql( s""" | CREATE TABLE carbon( | a INT, | mapField array<STRING>, | b STRING | ) | PARTITIONED BY (mp map<int,string>) | STORED BY 'carbondata' | """ .stripMargin) ) assertResult("Cannot use map<int,string> for partition column;")(exception.getMessage()) } test("Test IUD in map columns") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | a INT, | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql("insert into carbon values(1,'1\\002Nalla\\0012\\002Singh\\0013\\002Gupta\\0014\\002Kumar')") sql("insert into carbon 
values(2,'1\\002abc\\0012\\002xyz\\0013\\002hello\\0014\\002mno')") val exception = intercept[UnsupportedOperationException]( sql("update carbon set(mapField)=('1,haha') where a=1").show(false)) assertResult("Unsupported operation on Complex data type")(exception.getMessage()) sql("delete from carbon where mapField[1]='abc'") checkAnswer(sql("select * from carbon"), Seq( Row(1, Map(1 -> "Nalla", 2 -> "Singh", 3 -> "Gupta", 4 -> "Kumar")))) } test("Test Load duplicate keys data in map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) val desc = sql( s""" | Describe Formatted | carbon | """.stripMargin).collect() sql("insert into carbon values('1\\002Nalla\\0012\\002Singh\\0011\\002Gupta\\0014\\002Kumar')") checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")))) } test("Test Load data in map of map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,map<INT,STRING>> | ) | STORED BY |'carbondata' """ .stripMargin) sql( "insert into carbon values('manish\\0021\\004nalla\\0032\\004gupta\\001kunal\\0021\\004kapoor\\0032" + "\\004sharma')") checkAnswer(sql("select * from carbon"), Seq( Row(Map("manish" -> Map(1 -> "nalla", 2 -> "gupta"), "kunal" -> Map(1 -> "kapoor", 2 -> "sharma"))))) } test("Test Load duplicate keys data in map of map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<STRING,map<INT,STRING>> | ) | STORED BY |'carbondata' |""" .stripMargin) sql( "insert into carbon values('manish\\0021\\004nalla\\0031\\004gupta\\001kunal\\0021\\004kapoor\\0032" + "\\004sharma')") checkAnswer(sql("select * from carbon"), Seq( Row(Map("manish" -> Map(1 -> "nalla"), "kunal" -> Map(1 -> "kapoor", 2 -> "sharma"))))) } test("Test Create table as select with map") { sql("DROP TABLE IF EXISTS carbon") sql("DROP TABLE IF EXISTS carbon1") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql("insert into carbon values('1\\002Nalla\\0012\\002Singh\\0013\\002Gupta\\0014\\002Kumar')") sql( s""" | CREATE TABLE carbon1 | AS | Select * | From carbon | """ .stripMargin) checkAnswer(sql("select * from carbon1"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 3 -> "Gupta", 4 -> "Kumar")))) } test("Test Create table with double datatype in map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<DOUBLE,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql( "insert into carbon values('1.23\\002Nalla\\0012.34\\002Singh\\0013.67676\\002Gupta\\0013.67676" + "\\002Kumar')") checkAnswer(sql("select * from carbon"), Seq( Row(Map(1.23 -> "Nalla", 2.34 -> "Singh", 3.67676 -> "Gupta")))) } test("Load Map data from CSV File") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' = 'false') """.stripMargin) checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")) )) } test("test compaction with map data type") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' 
= 'false') """.stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' = 'false') """.stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' = 'false') """.stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' = 'false') """.stripMargin) sql("alter table carbon compact 'minor'") sql("show segments for table carbon").show(false) checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")), Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")), Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")), Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")) )) sql("DROP TABLE IF EXISTS carbon") } test("Sort Column table property blocking for Map type") { sql("DROP TABLE IF EXISTS carbon") val exception1 = intercept[Exception] { sql( s""" | CREATE TABLE carbon( | mapField map<STRING,STRING> | ) | STORED BY 'carbondata' | TBLPROPERTIES('SORT_COLUMNS'='mapField') | """ .stripMargin) } assert(exception1.getMessage .contains( "sort_columns is unsupported for map datatype column: mapfield")) } test("Data Load Fail Issue") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,STRING> | ) | STORED BY 'carbondata' | """ .stripMargin) sql( s""" | LOAD DATA LOCAL INPATH '$path' | INTO TABLE carbon OPTIONS( | 'header' = 'false') """.stripMargin) sql("INSERT INTO carbon SELECT * FROM carbon") checkAnswer(sql("select * from carbon"), Seq( Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(1 -> "Nalla", 2 -> "Singh", 4 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")), Row(Map(10 -> "Nallaa", 20 -> "Sissngh", 100 -> "Gusspta", 40 -> "Kumar")) )) } test("Struct inside map") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,struct<kk:STRING,mm:STRING>> | ) | STORED BY 'carbondata' | """ .stripMargin) sql("INSERT INTO carbon values('1\\002man\\003nan\\0012\\002kands\\003dsnknd')") sql("INSERT INTO carbon SELECT * FROM carbon") checkAnswer(sql("SELECT * FROM carbon limit 1"), Seq(Row(Map(1 -> Row("man", "nan"), (2 -> Row("kands", "dsnknd")))))) } test("Struct inside map pushdown") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | mapField map<INT,struct<kk:STRING,mm:STRING>> | ) | STORED BY 'carbondata' | """ .stripMargin) sql("INSERT INTO carbon values('1\\002man\\003nan\\0012\\002kands\\003dsnknd')") checkAnswer(sql("SELECT mapField[1].kk FROM carbon"), Row("man")) } test("Map inside struct") { sql("DROP TABLE IF EXISTS carbon") sql( s""" | CREATE TABLE carbon( | structField struct<intVal:INT,map1:MAP<STRING,STRING>> | ) | STORED BY 'carbondata' | """ .stripMargin) sql("INSERT INTO carbon values('1\\001man\\003nan\\002kands\\003dsnknd')") val res = sql("SELECT structField.intVal FROM carbon").show(false) checkAnswer(sql("SELECT structField.intVal FROM carbon"), Seq(Row(1))) } }
manishgupta88/carbondata
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateDDLForComplexMapType.scala
Scala
apache-2.0
16,868
package com.soundcloud.spdt

import scala.math

object Bin extends Pickling[Bin] {
  def pickle(b: Bin) = List(b.p, b.m).map(Formatting.formatDecimal(_)).mkString(",")

  def fromPickle(s: String): Bin = {
    val Array(p, m) = s.split(',').map(_.toDouble)
    Bin(p, m)
  }
}

// A histogram bin centered at point p holding mass m
case class Bin(p: Double, m: Double)

object Histogram extends Pickling[Histogram] {

  // Merge a list of histograms that all share the same maxBins
  def merge(mergeList: List[Histogram]): Histogram = {
    if (mergeList.isEmpty || mergeList.map(_.maxBins).distinct.length != 1)
      throw new RuntimeException(
        "Illegal merge list! mergeList=%s".format(mergeList.toString.slice(0, 100)))
    mergeList.par.reduce(_.merge(_))
  }

  def pickle(hist: Histogram) =
    "h|%s:%s".format(hist.maxBins, hist.bins.map(Bin.pickle(_)).mkString(":"))

  def fromPickle(s: String): Histogram = {
    val Array(tag, classValString) = s.split('|')
    require(tag == "h")

    val classVals = classValString.split(":").toList
    val maxBins   = classVals.head.toInt
    val bins      = classVals.tail.map(Bin.fromPickle(_)).toVector

    if (maxBins == 2 && bins.map(_.p) == Vector(0.0, 1.0)) {
      new BooleanHistogram(bins)
    } else {
      new RealValuedHistogram(bins, maxBins)
    }
  }
}

abstract class Histogram {
  val maxBins: Int
  val bins: Vector[Bin]

  def numBins = bins.length

  // Add a single observation at point p
  def update(p: Double): Histogram

  // Combine this histogram with another of the same maxBins
  def merge(h2: Histogram): Histogram

  // Candidate split points partitioning the estimated mass uniformly
  def uniform: List[Double]

  // Estimated number of points less than or equal to b
  def sum(b: Double): Double

  // Total mass held across all bins
  val total: Double = if (bins.isEmpty) 0 else bins.par.map(_.m).reduce(_ + _)
}

class BooleanHistogram(val bins: Vector[Bin]) extends Histogram {

  if (bins.length != 2 || bins(0).p != 0.0 || bins(1).p != 1.0)
    throw new RuntimeException(
      "Illegal boolean histogram! bins=%s".format(bins.toString))

  val maxBins: Int = 2

  def this() = this(Vector[Bin](Bin(0.0, 0), Bin(1.0, 0)))

  def this(numFalse: Double, numTrue: Double) =
    this(Vector[Bin](Bin(0.0, numFalse), Bin(1.0, numTrue)))

  def update(p: Double): BooleanHistogram = {
    if (p != 0.0 && p != 1.0)
      throw new RuntimeException(
        "Illegal update point! p=%f".format(p))

    val newBins =
      if (p == 0.0) {
        Vector(Bin(0.0, bins(0).m + 1), bins(1))
      } else {
        Vector(bins(0), Bin(1.0, bins(1).m + 1))
      }
    new BooleanHistogram(newBins)
  }

  def merge(h2: Histogram): BooleanHistogram = {
    new BooleanHistogram(Vector(
      Bin(0.0, h2.bins(0).m + bins(0).m),
      Bin(1.0, h2.bins(1).m + bins(1).m)))
  }

  def uniform: List[Double] =
    if (bins(0).m > 0 && bins(1).m > 0) List(0.5) else List()

  def sum(b: Double): Double = {
    if (!List(0.0, 0.5, 1.0).contains(b))
      throw new RuntimeException(
        "Illegal sum point! b=%f".format(b))
    bins(0).m + (if (b == 1.0) bins(1).m else 0.0)
  }
}

class RealValuedHistogram(val bins: Vector[Bin], val maxBins: Int) extends Histogram {

  def this(mBins: Int) = this(Vector[Bin](), mBins)

  // Add point p: increment its bin if one sits exactly at p; otherwise insert
  // a new unit bin and, if over capacity, collapse the two closest bins.
  def update(p: Double): RealValuedHistogram = {
    val currentInd = bins.map(_.p).indexOf(p)

    val newBins =
      if (currentInd != -1) {
        val current = bins(currentInd)
        (bins.slice(0, currentInd) :+ Bin(current.p, current.m + 1)) ++
          (if (currentInd + 1 < bins.length) bins.slice(currentInd + 1, bins.length) else Nil)
      } else {
        var nb = (bins :+ Bin(p, 1)).sortBy(_.p)
        if (nb.length > maxBins) {
          if (nb.length > maxBins + 1)
            throw new RuntimeException(
              "Illegal histogram state length! nb.length=%d, maxBins=%d".format(nb.length, maxBins))
          nb = replace(nb, minDiff(nb))
        }
        nb
      }
    new RealValuedHistogram(newBins, maxBins)
  }

  // Concatenate the bin lists, then repeatedly collapse the two closest bins:
  // first to remove duplicate centers, then until at most maxBins remain.
  def merge(h2: Histogram): RealValuedHistogram = {
    var newBins = (bins ++ h2.bins).sortBy(_.p)

    val numIdenticals = newBins.length - newBins.map(_.p).distinct.length
    0.until(numIdenticals).foreach(_ => {
      newBins = replace(newBins, minDiff(newBins))
    })

    val sizeDiff = newBins.length - maxBins
    0.until(sizeDiff).foreach(_ => {
      newBins = replace(newBins, minDiff(newBins))
    })
    new RealValuedHistogram(newBins, maxBins)
  }

  // Compute up to maxBins - 1 candidate split points, each holding roughly
  // total / maxBins of the mass, by inverting the interpolated CDF.
  def uniform: List[Double] = {
    if (bins.length <= 1) return List[Double]()

    var i = 0

    0.until(maxBins - 1).map(j => {
      val s = (j.toDouble + 1.0) / maxBins.toDouble * total

      while (sum(bins(i + 1).p) < s) { i += 1 }

      val (pi, mi, pip1, mip1) = unpackInterval(bins, i)

      // Solve the quadratic trapezoid-area equation for the point with sum == s
      val d = s - sum(bins(i).p)
      val a = mip1 - mi
      val b = 2.0 * mi
      val c = -2.0 * d
      val z =
        if (a != 0) {
          (-b + math.pow(b * b - 4.0 * a * c, 0.5)) / (2.0 * a)
        } else {
          -c / b
        }
      val uj = pi + (pip1 - pi) * z

      if (uj.isNaN) None else Some(uj)
    }).toList.flatten
  }

  // Interpolated count of points <= b: all bins left of the enclosing
  // interval, half of bin i, plus the trapezoid area between bin i and b.
  def sum(b: Double): Double = {
    if (b >= bins.last.p) {
      return total
    } else if (b < bins.head.p) {
      return 0.0
    }

    val i = findInterval(bins, b)
    val (pi, mi, pip1, mip1) = unpackInterval(bins, i)

    val mb = mi + (mip1 - mi) / (pip1 - pi) * (b - pi)
    val s  = (mi + mb) / 2.0 * (b - pi) / (pip1 - pi)

    s + bins.take(i).map(_.m).sum + mi / 2.0
  }

  // Index of the pair of adjacent bins whose centers are closest
  private def minDiff(binsState: Vector[Bin]): Int = {
    val diff = binsState.sliding(2)
      .toList
      .par
      .map(pair => pair(1).p - pair(0).p)
    diff.indexOf(diff.min)
  }

  // Collapse bins ind and ind + 1 into one bin at their mass-weighted centroid
  private def replace(binsState: Vector[Bin], ind: Int): Vector[Bin] = {
    val (pi, mi, pip1, mip1) = unpackInterval(binsState, ind)

    val newM = mi + mip1
    val newP = (pi * mi + pip1 * mip1) / (mi + mip1)

    (binsState.slice(0, ind) :+ Bin(newP, newM)) ++
      (if (ind + 2 < binsState.length) binsState.slice(ind + 2, binsState.length) else Nil)
  }

  // Binary search for the interval i such that bins(i).p <= b < bins(i + 1).p
  private def findInterval(binsState: Vector[Bin], b: Double): Int = {
    val stateLen = binsState.length

    if (b >= binsState.last.p)
      throw new RuntimeException(
        "point p=%f is not located in any bin interval. \n %s".format(b, binsState.map(_.p)))

    if (stateLen < 2)
      throw new RuntimeException(
        "Illegal state length! stateLen=%d".format(stateLen))

    var imin = 0
    var imax = stateLen - 1
    var imid = -1

    var continue = true
    while (continue && imax >= imin) {
      imid = imin + (imax - imin) / 2

      if (imid == stateLen - 2) {
        continue = false
      } else if (binsState(imid + 1).p > b && binsState(imid).p <= b) {
        continue = false
      } else if (binsState(imid + 1).p <= b) {
        imin = imid + 1
      } else {
        imax = imid - 1
      }
    }
    imid
  }

  private def unpackInterval(
    binsState: Vector[Bin],
    i: Int): (Double, Double, Double, Double) = {

    val mi   = binsState(i).m
    val mip1 = binsState(i + 1).m
    val pi   = binsState(i).p
    val pip1 = binsState(i + 1).p

    (pi, mi, pip1, mip1)
  }
}
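// ---------------------------------------------------------------------------
// Minimal usage sketch, not part of the original file: build two fixed-size
// streaming histograms from points, merge them, and query the approximate CDF
// (`sum`) and candidate split points (`uniform`). Only the classes defined
// above are used; the object name is illustrative.
// ---------------------------------------------------------------------------
object HistogramUsageSketch extends App {

  val left: Histogram =
    List(1.0, 2.0, 7.0, 8.0).foldLeft(new RealValuedHistogram(3): Histogram)(_.update(_))
  val right: Histogram =
    List(3.0, 9.0).foldLeft(new RealValuedHistogram(3): Histogram)(_.update(_))

  // Merging keeps at most maxBins bins by collapsing the closest centroids,
  // so the result approximates the union of both point streams.
  val merged = Histogram.merge(List(left, right))

  println(merged.total)    // 6.0 -- merging preserves total mass
  println(merged.sum(5.0)) // approximate number of points <= 5.0
  println(merged.uniform)  // up to maxBins - 1 approximate quantile points
}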
soundcloud/spdt
compute/src/main/scala/com.soundcloud.spdt/Histogram.scala
Scala
mit
6,791
/**
 * Copyright (C) 2007 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.xforms.control

import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.util.DynamicVariable
import org.orbeon.oxf.xforms.analysis.ElementAnalysis
import org.orbeon.oxf.xforms.control.controls._
import org.orbeon.oxf.xforms.state.{ControlState, InstanceState, InstancesControls}
import org.orbeon.oxf.xforms.xbl.XBLContainer
import org.orbeon.oxf.xforms.{BindingContext, _}
import org.orbeon.xforms.Constants.{RepeatIndexSeparatorString, RepeatSeparatorString}
import org.orbeon.xforms.{XFormsNames, XFormsId}

import scala.jdk.CollectionConverters._
import scala.collection.compat._

object Controls {

  // Special for Form Runner
  // It's not ideal to have this here, but we currently don't have a pluggable way for Form Runner to indicate to
  // XForms that some controls should be indexed.
  val SectionTemplateUriPrefix = "http://orbeon.org/oxf/xml/form-builder/component/"

  // Create the entire tree of control from the root
  def createTree(
    containingDocument : XFormsContainingDocument,
    controlIndex       : ControlIndex,
    state              : Option[Map[String, ControlState]]
  ): Option[XFormsControl] = {

    val bindingContext = containingDocument.getContextStack.resetBindingContext()
    val rootControl    = containingDocument.staticState.topLevelPart.getTopLevelControls.head

    buildTree(
      controlIndex,
      state,
      containingDocument,
      bindingContext,
      None,
      rootControl,
      Nil
    ) |!>
      logTreeIfNeeded("after building full tree")
  }

  // Create a new repeat iteration for insertion into the current tree of controls
  def createRepeatIterationTree(
    containingDocument : XFormsContainingDocument, // TODO: unused
    controlIndex       : ControlIndex,
    repeatControl      : XFormsRepeatControl,
    iterationIndex     : Int
  ): XFormsRepeatIterationControl = {

    val idSuffix = XFormsId.getEffectiveIdSuffixParts(repeatControl.getEffectiveId).toSeq :+ iterationIndex

    // This is the context of the iteration
    // buildTree() does a pushBinding(), but that won't change the context (no @ref, etc. on the iteration itself)
    val container      = repeatControl.container
    val bindingContext = container.getContextStack.setBinding(repeatControl.bindingContext)

    // This has to be the case at this point, otherwise it's a bug in our code
    assert(repeatControl.staticControl.iteration.isDefined)

    val controlOpt =
      buildTree(
        controlIndex,
        None,
        container,
        bindingContext,
        Some(repeatControl),
        repeatControl.staticControl.iteration.get,
        idSuffix
      ) |!>
        logTreeIfNeeded("after building repeat iteration tree")

    controlOpt.get.asInstanceOf[XFormsRepeatIterationControl] // we "know" this, right?
  }

  // Create a new subtree of controls (used by xxf:dynamic)
  def createSubTree(
    container        : XBLContainer,
    controlIndex     : ControlIndex,
    containerControl : XFormsContainerControl,
    rootAnalysis     : ElementAnalysis,
    state            : Option[Map[String, ControlState]]
  ): Option[XFormsControl] = {

    val idSuffix       = XFormsId.getEffectiveIdSuffixParts(containerControl.getEffectiveId).toSeq
    val bindingContext = containerControl.bindingContextForChildOrEmpty

    buildTree(
      controlIndex,
      state,
      container,
      bindingContext,
      Some(containerControl),
      rootAnalysis,
      idSuffix
    ) |!>
      logTreeIfNeeded("after building subtree")
  }

  // Build a component subtree
  private def buildTree(
    controlIndex   : ControlIndex,
    state          : Option[Map[String, ControlState]],
    container      : XBLContainer,
    bindingContext : BindingContext,
    parentOption   : Option[XFormsControl],
    staticElement  : ElementAnalysis,
    idSuffix       : Seq[Int]
  ): Option[XFormsControl] = {

    // Determine effective id
    val effectiveId =
      if (idSuffix.isEmpty)
        staticElement.prefixedId
      else
        staticElement.prefixedId + RepeatSeparatorString + (idSuffix mkString RepeatIndexSeparatorString)

    // Instantiate the control
    // TODO LATER: controls must take ElementAnalysis, not Element

    val stats = container.containingDocument.getRequestStats

    // NOTE: If we are unable to create a control (case of Model at least), this has no effect
    XFormsControlFactory.createXFormsControl(container, parentOption.orNull, staticElement, effectiveId) map { control =>

      stats.controlsCreated += 1

      // Index the new control
      // NOTE: We used to do this after evaluating the binding. In general it shouldn't hurt to do it here.
      // 2018-12-21: The probable reason to move indexing before is so that variables can resolve controls indexed so far.
      controlIndex.indexControl(control)

      // Determine binding
      control.evaluateBindingAndValues(
        parentContext = bindingContext,
        update        = false,
        restoreState  = state.isDefined,
        state         = state flatMap (_.get(effectiveId))
      )

      // Build the control's children if any
      control.buildChildren(buildTree(controlIndex, state, _, _, Some(control), _, _), idSuffix)

      control
    }
  }

  // Build children controls if any, delegating the actual construction to the given `buildTree` function
  def buildChildren(
    control   : XFormsControl,
    children  : => Iterable[ElementAnalysis],
    buildTree : (XBLContainer, BindingContext, ElementAnalysis, Seq[Int]) => Option[XFormsControl],
    idSuffix  : Seq[Int]
  ): Unit = {

    // Start with the context within the current control
    var newBindingContext = control.bindingContextForChildOrEmpty

    // Build each child
    children foreach { childElement =>
      buildTree(control.container, newBindingContext, childElement, idSuffix) foreach { newChildControl =>
        // Update the context based on the just created control
        newBindingContext = newChildControl.bindingContextForFollowing
      }
    }
  }

  def iterateAllRepeatedControlsForTarget(
    containingDocument       : XFormsContainingDocument,
    sourceControlEffectiveId : String,
    targetStaticId           : String
  ): Iterator[XFormsControl] =
    resolveControlsById(
      containingDocument,
      sourceControlEffectiveId,
      targetStaticId,
      followIndexes = true
    ).iterator flatMap
      XFormsRepeatControl.findAllRepeatedControls

  def resolveControlsById(
    containingDocument       : XFormsContainingDocument,
    sourceControlEffectiveId : String,
    targetStaticId           : String,
    followIndexes            : Boolean
  ): List[XFormsControl] = {

    val sourcePrefixedId = XFormsId.getPrefixedId(sourceControlEffectiveId)
    val scope            = containingDocument.staticOps.scopeForPrefixedId(sourcePrefixedId)
    val targetPrefixedId = scope.prefixedIdForStaticId(targetStaticId)

    for {
      controls           <- Option(containingDocument.controls).toList
      effectiveControlId <-
        resolveControlsEffectiveIds(
          containingDocument.staticOps,
          controls.getCurrentControlTree,
          sourceControlEffectiveId,
          targetPrefixedId,
          followIndexes
        )
      control            <- controls.findObjectByEffectiveId(effectiveControlId)
    } yield
      control
  }

  /**
   * Find effective control ids based on a source and a control static id, following XBL scoping and the repeat
   * hierarchy.
   *
   * @return effective control ids (0 or 1 if `followIndexes == true`, 0 to n if `followIndexes = false`)
   */
  def resolveControlsEffectiveIds(
    ops               : StaticStateGlobalOps,
    tree              : ControlTree,
    sourceEffectiveId : String,  // reference to source control, e.g. "list$age.3"
    targetPrefixedId  : String,  // reference to target control, e.g. "list$xf-10"
    followIndexes     : Boolean  // whether to follow repeat indexes
  ): List[String] = {

    // Don't do anything if there are no controls
    if (tree.children.isEmpty)
      return Nil

    // NOTE: The implementation tries to do a maximum using the static state. One reason is that the source
    // control's effective id might not yet have an associated control during construction. E.g.:
    //
    //     <xf:group id="my-group" ref="employee[index('employee-repeat')]">
    //
    // In that case, the XFormsGroupControl does not yet exist when its binding is evaluated. However, its
    // effective id is known and passed as source, and can be used for resolving the id passed to the index()
    // function.
    //
    // We trust the caller to pass a valid source effective id. That value is always internal, i.e. not created by a
    // form author. On the other hand, the target id cannot be trusted as it is typically specified by the form
    // author.

    val (_, commonIndexesLeafToRoot, remainingRepeatPrefixedIdsLeafToRoot) =
      getStaticRepeatDetails(ops, sourceEffectiveId, targetPrefixedId)

    def searchNextRepeatLevel(indexes: List[Int], nextRepeatPrefixedIds: List[String]): List[List[Int]] =
      nextRepeatPrefixedIds match {
        case Nil =>
          List(indexes)
        case repeatPrefixedId :: remainingPrefixedIds =>
          tree.findRepeatControl(repeatPrefixedId + buildSuffix(indexes)) match {
            case None =>
              Nil // control might not exist (but why?)
            case Some(repeatControl) if followIndexes =>
              searchNextRepeatLevel(repeatControl.getIndex :: indexes, remainingPrefixedIds)
            case Some(repeatControl) =>
              1 to repeatControl.getSize flatMap (i => searchNextRepeatLevel(i :: indexes, remainingPrefixedIds)) toList
            case _ =>
              throw new IllegalStateException
          }
      }

    val allIndexes = searchNextRepeatLevel(commonIndexesLeafToRoot, remainingRepeatPrefixedIdsLeafToRoot.reverse)

    allIndexes map (indexes => targetPrefixedId + buildSuffix(indexes))
  }

  def buildSuffix(iterations: List[Int]): String =
    if (iterations.isEmpty)
      ""
    else
      iterations.reverse map (_.toString) mkString (RepeatSeparatorString, RepeatIndexSeparatorString, "")

  def getStaticRepeatDetails(
    ops               : StaticStateGlobalOps,
    sourceEffectiveId : String,
    targetPrefixedId  : String
  ): (Option[String], List[Int], List[String]) = {

    require(sourceEffectiveId ne null, "Source effective id is required.")

    val sourcePrefixedId = XFormsId.getPrefixedId(sourceEffectiveId)
    val sourceParts      = XFormsId.getEffectiveIdSuffixParts(sourceEffectiveId)

    val ancestorRepeatPrefixedIdOpt = ops.findClosestCommonAncestorRepeat(sourcePrefixedId, targetPrefixedId)

    val commonIndexes =
      for {
        ancestorRepeatPrefixedId <- ancestorRepeatPrefixedIdOpt.to(List)
        index                    <- sourceParts.take(ops.getAncestorRepeatIds(ancestorRepeatPrefixedId).size + 1).reverse
      } yield
        index

    // Find list of ancestor repeats for destination WITHOUT including the closest ancestor repeat if any
    val remainingTargetRepeatsPrefixedIds = ops.getAncestorRepeatIds(targetPrefixedId, ancestorRepeatPrefixedIdOpt)

    (ancestorRepeatPrefixedIdOpt, commonIndexes, remainingTargetRepeatsPrefixedIds)
  }

  // Update the container's and all its descendants' bindings
  // This is used by `xf:switch` and `xxf:dialog` as of 2021-04-14.
  def updateBindings(control: XFormsContainerControl): BindingUpdater = {

    val xpathDependencies = control.containingDocument.xpathDependencies
    xpathDependencies.bindingUpdateStart()

    val startBindingContext =
      control.preceding map (_.bindingContextForFollowing) getOrElse control.parent.bindingContextForChildOrEmpty

    val updater = new BindingUpdater(control.containingDocument, startBindingContext)
    visitControls(control, updater, includeCurrent = true)
    xpathDependencies.bindingUpdateDone()

    Option(control) foreach logTreeIfNeeded("after subtree update")

    updater
  }

  // Update the bindings for the entire tree of controls
  def updateBindings(containingDocument: XFormsContainingDocument): BindingUpdater = {
    val updater = new BindingUpdater(containingDocument, containingDocument.getContextStack.resetBindingContext())
    visitAllControls(containingDocument, updater)

    containingDocument.controls.getCurrentControlTree.rootOpt foreach
      logTreeIfNeeded("after full tree update")

    updater
  }

  class BindingUpdater(
    val containingDocument  : XFormsContainingDocument,
    val startBindingContext : BindingContext
  ) extends XFormsControlVisitorListener {

    private var newIterationsIds = Set.empty[String]

    // Start with initial context
    private var bindingContext = startBindingContext
    private val xpathDependencies = containingDocument.xpathDependencies

    private var level = 0
    private var relevanceChangeLevel = -1

    private var _visitedCount = 0
    def visitedCount: Int = _visitedCount

    private var _updatedCount = 0
    def updatedCount: Int = _updatedCount

    private var _optimizedCount = 0
    def optimizedCount: Int = _optimizedCount

    var _partialFocusRepeatOption: Option[XFormsRepeatControl] = None
    def partialFocusRepeat: Option[XFormsRepeatControl] = _partialFocusRepeatOption

    val stats = containingDocument.getRequestStats

    def startVisitControl(control: XFormsControl): Boolean = {

      // Increment before the early return as `endVisitControl` always decrements.
      // Caused https://github.com/orbeon/orbeon-forms/issues/3976
      level += 1

      // If this is a new iteration, don't recurse into it
      if (newIterationsIds.nonEmpty && control.isInstanceOf[XFormsRepeatIterationControl] && newIterationsIds(control.effectiveId))
        return false

      _visitedCount += 1

      // Value of relevance of content before messing with the binding
      val wasContentRelevant = control.wasContentRelevant

      // Update is required if:
      //
      // - we are within a container whose content relevance has changed
      // - or dependencies tell us an update is required
      // - or the control has a @model attribute (TODO TEMP HACK: because that causes model variable evaluation!)
      def mustReEvaluateBinding =
        (relevanceChangeLevel != -1 && level > relevanceChangeLevel) ||
        xpathDependencies.requireBindingUpdate(control.staticControl, control.effectiveId) ||
        control.staticControl.model.isDefined

      // Only update the binding if needed
      if (mustReEvaluateBinding) {

        stats.bindingsUpdated += 1

        def evaluateBindingAndValues(): Unit =
          control.evaluateBindingAndValues(
            parentContext = bindingContext,
            update        = true,
            restoreState  = false,
            state         = None
          )

        control match {
          case repeatControl: XFormsRepeatControl =>
            // Update iterations
            val oldRepeatSeq = control.bindingContext.nodeset.asScala

            evaluateBindingAndValues()

            val (newIterations, partialFocusRepeatOption) = repeatControl.updateIterations(oldRepeatSeq)

            // Remember partial focus out of repeat if needed
            if (this._partialFocusRepeatOption.isEmpty && partialFocusRepeatOption.isDefined)
              this._partialFocusRepeatOption = partialFocusRepeatOption

            // Remember newly created iterations so we don't recurse into them in startRepeatIteration()
            //
            // - It is not needed to recurse into them because their bindings are up to date since they have
            //   just been created
            // - However they have not yet been evaluated. They will be evaluated at the same time the other
            //   controls are evaluated
            //
            // NOTE: don't call ControlTree.initializeRepeatIterationTree() here because refresh evaluates
            // controls and dispatches events
            this.newIterationsIds = newIterations map (_.getEffectiveId) toSet
          case _ =>
            // Simply set the new binding
            evaluateBindingAndValues()
        }
        _updatedCount += 1
      } else {
        stats.bindingsRefreshed += 1
        control.refreshBindingAndValues(bindingContext)
        _optimizedCount += 1
      }

      // Remember whether we are in a container whose content relevance has changed
      // NOTE: The correct logic at this time is to force binding re-evaluation if container relevance has
      // changed. Doing this only when content becomes relevant is not enough as shown with the following bug:
      // https://github.com/orbeon/orbeon-forms/issues/939
      if (relevanceChangeLevel == -1 && control.isInstanceOf[XFormsContainerControl] && wasContentRelevant != control.contentRelevant)
        relevanceChangeLevel = level // entering level of containing

      control.bindingContextForChildOpt match {
        case Some(bindingContextForChild) =>
          bindingContext = bindingContextForChild
          true
        case None =>
          bindingContext = null // should not be used
          false
      }
    }

    def endVisitControl(control: XFormsControl): Unit = {

      // Check if we are exiting the level of a container whose content relevance has changed
      if (relevanceChangeLevel == level)
        relevanceChangeLevel = -1

      // Update context for following controls
      bindingContext = control.bindingContextForFollowing

      // When we exit a repeat control, discard the list of new iterations so we don't unnecessarily test on them
      if (control.isInstanceOf[XFormsRepeatControl])
        newIterationsIds = Set.empty[String]

      level -= 1
    }
  }

  // Iterator over a control's ancestors
  class AncestorOrSelfIterator(start: XFormsControl) extends Iterator[XFormsControl] {

    private var _next = start

    def hasNext: Boolean = _next ne null

    def next(): XFormsControl = {
      val result = _next
      _next = _next.parent
      result
    }
  }

  trait XFormsControlVisitorListener {
    def startVisitControl(control: XFormsControl): Boolean
    def endVisitControl(control: XFormsControl): Unit
  }

  // Visit all the controls
  // 2018-01-04: 1 use left
  def visitAllControls(containingDocument: XFormsContainingDocument, listener: XFormsControlVisitorListener): Unit =
    visitSiblings(listener, containingDocument.controls.getCurrentControlTree.children)

  // Iterator over the given control and its descendants
  class ControlsIterator(
    private val start         : XFormsControl,
    private val includeSelf   : Boolean,
    private val followVisible : Boolean
  ) extends Iterator[XFormsControl] {

    private val children = start match {
      case c: XFormsSwitchControl if followVisible => c.selectedCaseIfRelevantOpt.iterator
      case c: XFormsContainerControl               => c.children.iterator
      case _                                       => Iterator.empty
    }

    private var descendants: Iterator[XFormsControl] = Iterator.empty

    private def findNext(): XFormsControl =
      if (descendants.hasNext)
        // Descendants of current child
        descendants.next()
      else if (children.hasNext) {
        // Move to next child
        val next = children.next()
        if (next.isInstanceOf[XFormsContainerControl])
          descendants = ControlsIterator(next, includeSelf = false)
        next
      } else
        null

    private var current =
      if (includeSelf)
        start
      else
        findNext()

    def next(): XFormsControl = {
      val result = current
      current = findNext()
      result
    }

    def hasNext: Boolean = current ne null
  }

  object ControlsIterator {
    def apply(start: XFormsControl, includeSelf: Boolean, followVisible: Boolean = false): Iterator[XFormsControl] =
      new ControlsIterator(start, includeSelf, followVisible)

    def apply(start: ControlTree): Iterator[XFormsControl] =
      start.children.iterator flatMap (new ControlsIterator(_, includeSelf = true, followVisible = false))
  }

  // Evaluate the body with InstancesControls in scope
  def withDynamicStateToRestore[T](instancesControls: InstancesControls, topLevel: Boolean = false)(body: => T): T =
    instancesControlsToRestore.withValue((instancesControls, topLevel))(body)

  // Get state to restore
  private def restoringDynamicState = instancesControlsToRestore.value
  def restoringInstanceControls : Option[InstancesControls]         = restoringDynamicState map (_._1)
  def restoringControls         : Option[Map[String, ControlState]] = restoringInstanceControls map (_.controls)
  def restoringInstances        : Option[List[InstanceState]]       = restoringInstanceControls map (_.instances)

  // Whether we are restoring state
  def isRestoringDynamicState: Boolean = restoringDynamicState exists (_._2)

  // ThreadLocal for dynamic state restoration
  private val instancesControlsToRestore = new DynamicVariable[(InstancesControls, Boolean)]

  // Visit all the descendant controls of the given container control
  // 2018-01-04: 2 uses left:
  // - `updateBindings`
  // - `dispatchDestructionEventsForRemovedRepeatIteration`
  def visitControls(control: XFormsControl, listener: XFormsControlVisitorListener, includeCurrent: Boolean): Unit =
    control match {
      case containerControl: XFormsContainerControl =>
        // Container itself
        if (includeCurrent)
          if (! listener.startVisitControl(containerControl))
            return

        // Children
        visitSiblings(listener, containerControl.children)

        // Container itself
        if (includeCurrent)
          listener.endVisitControl(containerControl)
      case control =>
        if (includeCurrent) {
          listener.startVisitControl(control)
          listener.endVisitControl(control)
        }
    }

  private def visitSiblings(listener: XFormsControlVisitorListener, children: Seq[XFormsControl]): Unit =
    for (currentControl <- children) {
      if (listener.startVisitControl(currentControl)) {
        currentControl match {
          case container: XFormsContainerControl =>
            visitSiblings(listener, container.children)
          case nonContainer =>
            // NOTE: Unfortunately we handle children actions of non container controls a bit differently
            val childrenActions = nonContainer.childrenActions
            if (childrenActions.nonEmpty)
              visitSiblings(listener, childrenActions)
        }
      }
      listener.endVisitControl(currentControl)
    }

  // Log a subtree of controls as XML
  private def logTreeIfNeeded(message: String)(control: XFormsControl): Unit =
    if (XFormsGlobalProperties.getDebugLogging.contains("control-tree"))
      control.containingDocument.controls.indentedLogger.logDebug(message, ControlsDebugSupport.controlTreeAsXmlString(control))
}
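// ---------------------------------------------------------------------------
// Illustrative standalone sketch, not part of the original file: how
// `buildSuffix` above turns leaf-to-root iteration indexes into an
// effective-id suffix. The two separator values below are placeholders
// assumed for illustration only -- the real ones come from
// org.orbeon.xforms.Constants.
// ---------------------------------------------------------------------------
object BuildSuffixSketch extends App {

  val RepeatSeparatorString      = "~" // placeholder separator, assumed
  val RepeatIndexSeparatorString = "-" // placeholder separator, assumed

  // Same logic as Controls.buildSuffix above
  def buildSuffix(iterations: List[Int]): String =
    if (iterations.isEmpty)
      ""
    else
      iterations.reverse.map(_.toString).mkString(RepeatSeparatorString, RepeatIndexSeparatorString, "")

  // Indexes are accumulated leaf-to-root during resolution, hence the reverse:
  println(buildSuffix(Nil))        // ""
  println(buildSuffix(List(3, 1))) // "~1-3": iteration 1 of the outer repeat, iteration 3 of the inner
}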
orbeon/orbeon-forms
xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/control/Controls.scala
Scala
lgpl-2.1
23,712