| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M) |
|---|---|---|---|---|---|
package com.ing.baker.runtime.serialization.protomappings
import cats.implicits._
import com.google.protobuf.ByteString
import com.ing.baker.runtime.akka.actor.protobuf
import com.ing.baker.runtime.akka.actor.protobuf.Value.OneofValue._
import com.ing.baker.runtime.serialization.ProtoMap
import com.ing.baker.types
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.{LocalDate, LocalDateTime, LocalTime}
import scala.util.{Failure, Success, Try}
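/**
 * Protobuf mapping for Baker's `types.Value` hierarchy. Primitives map to
 * type-specific oneof cases; big numbers travel as strings so no precision is
 * lost on the wire, and byte arrays are wrapped in a protobuf ByteString.
 */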
class BakerValuesMapping extends ProtoMap[types.Value, protobuf.Value] {
val companion = protobuf.Value
def toProto(t: types.Value): protobuf.Value = protobuf.Value(t match {
case types.NullValue =>
NullValue(true)
case types.PrimitiveValue(value: Boolean) =>
BooleanValue(value)
case types.PrimitiveValue(value: Byte) =>
ByteValue(value)
case types.PrimitiveValue(value: Short) =>
ShortValue(value)
case types.PrimitiveValue(value: Char) =>
CharValue(value)
case types.PrimitiveValue(value: Int) =>
IntValue(value)
case types.PrimitiveValue(value: Long) =>
LongValue(value)
case types.PrimitiveValue(value: Float) =>
FloatValue(value)
case types.PrimitiveValue(value: Double) =>
DoubleValue(value)
case types.PrimitiveValue(value: String) =>
StringValue(value)
case types.PrimitiveValue(value: BigDecimal) =>
BigDecimalScalaValue(value.toString())
case types.PrimitiveValue(value: java.math.BigDecimal) =>
BigDecimalJavaValue(BigDecimal(value).toString())
case types.PrimitiveValue(value: BigInt) =>
BigIntScalaValue(value.toString())
case types.PrimitiveValue(value: java.math.BigInteger) =>
BigIntJavaValue(BigInt(value).toString())
case types.PrimitiveValue(value: Array[Byte]) =>
ByteArrayValue(ByteString.copyFrom(value))
case types.PrimitiveValue(value) =>
throw new IllegalStateException(s"Unknown primitive value of type: ${value.getClass}")
case types.RecordValue(entries) =>
RecordValue(protobuf.Record(entries.view.map { case (key, value) => (key, toProto(value))}.toMap))
case types.ListValue(entries) =>
ListValue(protobuf.List(entries.map(toProto)))
})
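// Mirrors toProto: each oneof case is mapped back to the corresponding
// types.Value; an empty oneof is rejected with a Failure.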
def fromProto(message: protobuf.Value): Try[types.Value] = message.oneofValue match {
case NullValue(_) =>
Success(types.NullValue)
case BooleanValue(bool) =>
Success(types.PrimitiveValue(bool))
case ByteValue(byte) =>
Success(types.PrimitiveValue(byte.toByte))
case ShortValue(short) =>
Success(types.PrimitiveValue(short.toShort))
case CharValue(char) =>
Success(types.PrimitiveValue(char.toChar))
case IntValue(int) =>
Success(types.PrimitiveValue(int))
case LongValue(long) =>
Success(types.PrimitiveValue(long))
case FloatValue(float) =>
Success(types.PrimitiveValue(float))
case DoubleValue(double) =>
Success(types.PrimitiveValue(double))
case StringValue(string) =>
Success(types.PrimitiveValue(string))
case BigDecimalScalaValue(bigdecimal) =>
Success(types.PrimitiveValue(BigDecimal(bigdecimal)))
case BigDecimalJavaValue(bigdecimal) =>
Success(types.PrimitiveValue(BigDecimal(bigdecimal).bigDecimal))
case BigIntScalaValue(bigint) =>
Success(types.PrimitiveValue(BigInt(bigint)))
case BigIntJavaValue(bigint) =>
Success(types.PrimitiveValue(BigInt(bigint).bigInteger))
case ByteArrayValue(byteArray) =>
Success(types.PrimitiveValue(byteArray.toByteArray))
case RecordValue(protobuf.Record(fields)) =>
fields.toList.traverse[Try, (String, types.Value)] {
case (key, value) => fromProto(value).map(key -> _)
}.map(inner => types.RecordValue(inner.toMap))
case ListValue(protobuf.List(entries)) =>
entries.toList.traverse[Try, types.Value](fromProto).map(types.ListValue)
// deprecated fields: the Joda date/time cases below are never produced by
// toProto anymore and are kept only to read back values written by older versions
case JodaDatetimeValue(date) =>
val dateTime = ISODateTimeFormat.dateTime().parseDateTime(date)
Success(types.PrimitiveValue(dateTime.getMillis))
case JodaLocaldateValue(date) =>
val localDate = LocalDate.parse(date)
Success(types.PrimitiveValue(localDate.toDateTime(LocalTime.MIDNIGHT).getMillis))
case JodaLocaldatetimeValue(date) =>
val localDateTime = LocalDateTime.parse(date)
Success(types.PrimitiveValue(localDateTime.toDateTime.getMillis))
case Empty =>
Failure(new IllegalStateException("Empty value cannot be deserialized"))
}
}
| ing-bank/baker | core/baker-interface/src/main/scala/com/ing/baker/runtime/serialization/protomappings/BakerValuesMapping.scala | Scala | mit | 4,554 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.server
import java.io.{BufferedInputStream, InputStream}
import java.net.InetAddress
import java.util.concurrent._
import java.util.EnumSet
import javax.servlet._
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
import org.apache.hadoop.security.authentication.server._
import org.eclipse.jetty.servlet.FilterHolder
import org.scalatra.{NotFound, ScalatraServlet}
import org.scalatra.metrics.MetricsBootstrap
import org.scalatra.metrics.MetricsSupportExtensions._
import org.scalatra.servlet.{MultipartConfig, ServletApiImplicits}
import org.apache.livy._
import org.apache.livy.server.auth.LdapAuthenticationHandlerImpl
import org.apache.livy.server.batch.BatchSessionServlet
import org.apache.livy.server.interactive.InteractiveSessionServlet
import org.apache.livy.server.recovery.{SessionStore, StateStore, ZooKeeperManager}
import org.apache.livy.server.ui.UIServlet
import org.apache.livy.sessions.{BatchSessionManager, InteractiveSessionManager}
import org.apache.livy.sessions.SessionManager.SESSION_RECOVERY_MODE_OFF
import org.apache.livy.utils.LivySparkUtils._
import org.apache.livy.utils.SparkYarnApp
class LivyServer extends Logging {
import LivyConf._
private var server: WebServer = _
private var _serverUrl: Option[String] = None
// make livyConf accessible for testing
private[livy] var livyConf: LivyConf = _
private var kinitFailCount: Int = 0
private var executor: ScheduledExecutorService = _
private var accessManager: AccessManager = _
private var _thriftServerFactory: Option[ThriftServerFactory] = None
private var zkManager: Option[ZooKeeperManager] = None
private var ugi: UserGroupInformation = _
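/**
 * Starts the Livy server: loads livy.conf, validates the local Spark
 * installation, optionally sets up Kerberos and state recovery, mounts the
 * session/batch/UI servlets plus auth filters, and starts the web server.
 */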
def start(): Unit = {
livyConf = new LivyConf().loadFromFile("livy.conf")
accessManager = new AccessManager(livyConf)
val host = livyConf.get(SERVER_HOST)
val port = livyConf.getInt(SERVER_PORT)
val basePath = livyConf.get(SERVER_BASE_PATH)
val multipartConfig = MultipartConfig(
maxFileSize = Some(livyConf.getLong(LivyConf.FILE_UPLOAD_MAX_SIZE))
).toMultipartConfigElement
// Make sure the `spark-submit` program exists, otherwise much of livy won't work.
testSparkHome(livyConf)
// Test spark-submit and get Spark Scala version accordingly.
val (sparkVersion, scalaVersionFromSparkSubmit) = sparkSubmitVersion(livyConf)
testSparkVersion(sparkVersion)
// If the Spark and Scala versions are set manually, verify that they are consistent with
// the ones parsed from "spark-submit --version"
val formattedSparkVersion = formatSparkVersion(sparkVersion)
Option(livyConf.get(LIVY_SPARK_VERSION)).map(formatSparkVersion).foreach { version =>
require(formattedSparkVersion == version,
s"Configured Spark version $version is not equal to Spark version $formattedSparkVersion " +
"got from spark-submit -version")
}
// Set the formatted Spark and Scala versions in the Livy configuration; they will be
// used during session creation.
// TODO Create a new class to pass variables from LivyServer to sessions and remove these
// internal LivyConfs.
livyConf.set(LIVY_SPARK_VERSION.key, formattedSparkVersion.productIterator.mkString("."))
livyConf.set(LIVY_SPARK_SCALA_VERSION.key,
sparkScalaVersion(formattedSparkVersion, scalaVersionFromSparkSubmit, livyConf))
if (livyConf.getBoolean(LivyConf.THRIFT_SERVER_ENABLED)) {
_thriftServerFactory = Some(ThriftServerFactory.getInstance)
}
if (UserGroupInformation.isSecurityEnabled) {
// If Hadoop security is enabled, run kinit periodically. runKinit() should be called
// before any Hadoop operation; otherwise a Kerberos exception will be thrown.
executor = Executors.newScheduledThreadPool(1,
new ThreadFactory() {
override def newThread(r: Runnable): Thread = {
val thread = new Thread(r)
thread.setName("kinit-thread")
thread.setDaemon(true)
thread
}
}
)
val launch_keytab = livyConf.get(LAUNCH_KERBEROS_KEYTAB)
val launch_principal = SecurityUtil.getServerPrincipal(
livyConf.get(LAUNCH_KERBEROS_PRINCIPAL), host)
require(launch_keytab != null,
s"Kerberos requires ${LAUNCH_KERBEROS_KEYTAB.key} to be provided.")
require(launch_principal != null,
s"Kerberos requires ${LAUNCH_KERBEROS_PRINCIPAL.key} to be provided.")
if (!runKinit(launch_keytab, launch_principal)) {
error("Failed to run kinit, stopping the server.")
sys.exit(1)
}
// This is, and should remain, the only place where a login() on the UGI is performed.
// If another login elsewhere in the codebase is strictly needed, a needLogin check
// should be added to ensure that two logins are never performed.
// This is needed because the thriftserver requires the UGI to be created from a keytab
// in order to work properly; previously Livy used a UGI generated from the cached TGT
// (created by the kinit command).
if (livyConf.getBoolean(LivyConf.THRIFT_SERVER_ENABLED)) {
UserGroupInformation.loginUserFromKeytab(launch_principal, launch_keytab)
}
ugi = UserGroupInformation.getCurrentUser
startKinitThread(launch_keytab, launch_principal)
}
testRecovery(livyConf)
// Initialize YarnClient ASAP to save time.
if (livyConf.isRunningOnYarn()) {
SparkYarnApp.init(livyConf)
Future { SparkYarnApp.yarnClient }
}
if (livyConf.get(LivyConf.RECOVERY_STATE_STORE) == "zookeeper") {
zkManager = Some(new ZooKeeperManager(livyConf))
zkManager.foreach(_.start())
}
StateStore.init(livyConf, zkManager)
val sessionStore = new SessionStore(livyConf)
val batchSessionManager = new BatchSessionManager(livyConf, sessionStore)
val interactiveSessionManager = new InteractiveSessionManager(livyConf, sessionStore)
server = new WebServer(livyConf, host, port)
server.context.setResourceBase("src/main/org/apache/livy/server")
val livyVersionServlet = new JsonServlet {
before() { contentType = "application/json" }
get("/") {
Map("version" -> LIVY_VERSION,
"user" -> LIVY_BUILD_USER,
"revision" -> LIVY_REVISION,
"branch" -> LIVY_BRANCH,
"date" -> LIVY_BUILD_DATE,
"url" -> LIVY_REPO_URL)
}
}
// Servlet for hosting static files such as html, css, and js
// Necessary since Jetty cannot set its resource base inside a jar
// Returns 404 if the file does not exist
val staticResourceServlet = new ScalatraServlet {
get("/*") {
val fileName = params("splat")
val notFoundMsg = "File not found"
if (!fileName.isEmpty) {
getClass.getResourceAsStream(s"ui/static/$fileName") match {
case is: InputStream => new BufferedInputStream(is)
case null => NotFound(notFoundMsg)
}
} else {
NotFound(notFoundMsg)
}
}
}
def uiRedirectServlet(path: String) = new ScalatraServlet {
get("/") {
redirect(path)
}
}
server.context.addEventListener(
new ServletContextListener() with MetricsBootstrap with ServletApiImplicits {
private def mount(sc: ServletContext, servlet: Servlet, mappings: String*): Unit = {
val registration = sc.addServlet(servlet.getClass().getName(), servlet)
registration.addMapping(mappings: _*)
registration.setMultipartConfig(multipartConfig)
}
override def contextDestroyed(sce: ServletContextEvent): Unit = {
}
override def contextInitialized(sce: ServletContextEvent): Unit = {
try {
val context = sce.getServletContext()
context.initParameters(org.scalatra.EnvironmentKey) = livyConf.get(ENVIRONMENT)
val interactiveServlet = new InteractiveSessionServlet(
interactiveSessionManager, sessionStore, livyConf, accessManager)
mount(context, interactiveServlet, "/sessions/*")
val batchServlet =
new BatchSessionServlet(batchSessionManager, sessionStore, livyConf, accessManager)
mount(context, batchServlet, "/batches/*")
if (livyConf.getBoolean(UI_ENABLED)) {
val uiServlet = new UIServlet(basePath, livyConf)
mount(context, uiServlet, "/ui/*")
mount(context, staticResourceServlet, "/static/*")
mount(context, uiRedirectServlet(basePath + "/ui/"), "/*")
_thriftServerFactory.foreach { factory =>
mount(context, factory.getServlet(basePath), factory.getServletMappings: _*)
}
} else {
mount(context, uiRedirectServlet(basePath + "/metrics"), "/*")
}
context.mountMetricsAdminServlet("/metrics")
mount(context, livyVersionServlet, "/version/*")
} catch {
case e: Throwable =>
error("Exception thrown when initializing server", e)
sys.exit(1)
}
}
})
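// Install the authentication filter matching the configured auth type:
// Kerberos (SPNEGO), LDAP, a user-supplied handler class, or none at all.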
livyConf.get(AUTH_TYPE) match {
case authType @ KerberosAuthenticationHandler.TYPE =>
val principal = SecurityUtil.getServerPrincipal(livyConf.get(AUTH_KERBEROS_PRINCIPAL),
server.host)
val keytab = livyConf.get(AUTH_KERBEROS_KEYTAB)
require(principal != null,
s"Kerberos auth requires ${AUTH_KERBEROS_PRINCIPAL.key} to be provided.")
require(keytab != null,
s"Kerberos auth requires ${AUTH_KERBEROS_KEYTAB.key} to be provided.")
val holder = new FilterHolder(new AuthenticationFilter())
holder.setInitParameter(AuthenticationFilter.AUTH_TYPE, authType)
holder.setInitParameter(KerberosAuthenticationHandler.PRINCIPAL, principal)
holder.setInitParameter(KerberosAuthenticationHandler.KEYTAB, keytab)
holder.setInitParameter(KerberosAuthenticationHandler.NAME_RULES,
livyConf.get(AUTH_KERBEROS_NAME_RULES))
server.context.addFilter(holder, "/*", EnumSet.allOf(classOf[DispatcherType]))
info(s"SPNEGO auth enabled (principal = $principal)")
case authType @ LdapAuthenticationHandlerImpl.TYPE =>
val holder = new FilterHolder(new AuthenticationFilter())
holder.setInitParameter(AuthenticationFilter.AUTH_TYPE,
LdapAuthenticationHandlerImpl.getClass.getCanonicalName.dropRight(1))
Option(livyConf.get(LivyConf.AUTH_LDAP_URL)).foreach { url =>
holder.setInitParameter(LdapAuthenticationHandlerImpl.PROVIDER_URL, url)
}
Option(livyConf.get(LivyConf.AUTH_LDAP_USERNAME_DOMAIN)).foreach { domain =>
holder.setInitParameter(LdapAuthenticationHandlerImpl.LDAP_BIND_DOMAIN, domain)
}
Option(livyConf.get(LivyConf.AUTH_LDAP_BASE_DN)).foreach { baseDN =>
holder.setInitParameter(LdapAuthenticationHandlerImpl.BASE_DN, baseDN)
}
holder.setInitParameter(LdapAuthenticationHandlerImpl.SECURITY_AUTHENTICATION,
livyConf.get(LivyConf.AUTH_LDAP_SECURITY_AUTH))
holder.setInitParameter(LdapAuthenticationHandlerImpl.ENABLE_START_TLS,
livyConf.get(LivyConf.AUTH_LDAP_ENABLE_START_TLS))
server.context.addFilter(holder, "/*", EnumSet.allOf(classOf[DispatcherType]))
info("LDAP auth enabled.")
case null =>
// Nothing to do.
case customType =>
val authClassConf = s"livy.server.auth.$customType.class"
val authClass = livyConf.get(authClassConf)
require(authClass != null, s"$customType auth requires $authClassConf to be provided")
val holder = new FilterHolder()
holder.setClassName(authClass)
val prefix = s"livy.server.auth.$customType.param."
livyConf.asScala.filter { kv =>
kv.getKey.length > prefix.length && kv.getKey.startsWith(prefix)
}.foreach { kv =>
holder.setInitParameter(kv.getKey.substring(prefix.length), kv.getValue)
}
server.context.addFilter(holder, "/*", EnumSet.allOf(classOf[DispatcherType]))
info(s"$customType auth enabled")
}
if (livyConf.getBoolean(CSRF_PROTECTION)) {
info("CSRF protection is enabled.")
val csrfHolder = new FilterHolder(new CsrfFilter())
server.context.addFilter(csrfHolder, "/*", EnumSet.allOf(classOf[DispatcherType]))
}
if (accessManager.isAccessControlOn) {
info("Access control is enabled")
val accessHolder = new FilterHolder(new AccessFilter(accessManager))
server.context.addFilter(accessHolder, "/*", EnumSet.allOf(classOf[DispatcherType]))
}
server.start()
_thriftServerFactory.foreach {
_.start(livyConf, interactiveSessionManager, sessionStore, accessManager)
}
Runtime.getRuntime().addShutdownHook(new Thread("Livy Server Shutdown") {
override def run(): Unit = {
info("Shutting down Livy server.")
zkManager.foreach(_.stop())
server.stop()
_thriftServerFactory.foreach(_.stop())
}
})
_serverUrl = Some(s"${server.protocol}://${server.host}:${server.port}")
sys.props("livy.server.server-url") = _serverUrl.get
}
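/**
 * Runs the external `kinit` command for the given keytab and principal.
 * Returns true and resets the failure counter on success; otherwise
 * increments the counter and returns false.
 */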
def runKinit(keytab: String, principal: String): Boolean = {
val commands = Seq("kinit", "-kt", keytab, principal)
val proc = new ProcessBuilder(commands: _*).inheritIO().start()
proc.waitFor() match {
case 0 =>
debug("Ran kinit command successfully.")
kinitFailCount = 0
true
case _ =>
warn("Fail to run kinit command.")
kinitFailCount += 1
false
}
}
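/**
 * Schedules periodic kinit runs to keep the Kerberos TGT fresh. After each
 * successful run the UGI is re-logged-in from the ticket cache and the next
 * run is scheduled after the refresh interval; failures are retried
 * immediately until the configured threshold is reached.
 */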
def startKinitThread(keytab: String, principal: String): Unit = {
val refreshInterval = livyConf.getTimeAsMs(LAUNCH_KERBEROS_REFRESH_INTERVAL)
val kinitFailThreshold = livyConf.getInt(KINIT_FAIL_THRESHOLD)
executor.schedule(
new Runnable() {
override def run(): Unit = {
if (runKinit(keytab, principal)) {
// The current UGI should never change. If it does, that is an error condition, and
// re-logging-in the original UGI would not update the current one, so the server
// would fail for lack of valid credentials. The assert here detects the error
// condition early and fails immediately with a meaningful message.
assert(ugi.equals(UserGroupInformation.getCurrentUser), "Current UGI has changed.")
ugi.reloginFromTicketCache()
// schedule another kinit run with a fixed delay.
executor.schedule(this, refreshInterval, TimeUnit.MILLISECONDS)
} else {
// retry immediately, or fail the Livy server if kinit has failed too many times
if (kinitFailCount >= kinitFailThreshold) {
error(s"Exit LivyServer after ${kinitFailThreshold} times failures running kinit.")
if (server.server.isStarted()) {
stop()
} else {
sys.exit(1)
}
} else {
executor.submit(this)
}
}
}
}, refreshInterval, TimeUnit.MILLISECONDS)
}
def join(): Unit = server.join()
def stop(): Unit = {
if (server != null) {
server.stop()
}
}
def serverUrl(): String = {
_serverUrl.getOrElse(throw new IllegalStateException("Server not yet started."))
}
/** For ITs only */
def getJdbcUrl: Option[String] = {
_thriftServerFactory.map { _ =>
val additionalUrlParams = if (livyConf.get(THRIFT_TRANSPORT_MODE) == "http") {
"?hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice"
} else {
""
}
val host = Option(livyConf.get(THRIFT_BIND_HOST)).getOrElse(
InetAddress.getLocalHost.getHostAddress)
val port = livyConf.getInt(THRIFT_SERVER_PORT)
s"jdbc:hive2://$host:$port$additionalUrlParams"
}
}
private[livy] def testRecovery(livyConf: LivyConf): Unit = {
if (!livyConf.isRunningOnYarn()) {
// If recovery is turned on but we are not running on YARN, quit.
require(livyConf.get(LivyConf.RECOVERY_MODE) == SESSION_RECOVERY_MODE_OFF,
"Session recovery requires YARN.")
}
}
}
object LivyServer {
def main(args: Array[String]): Unit = {
val server = new LivyServer()
try {
server.start()
server.join()
} finally {
server.stop()
}
}
}
| ajbozarth/incubator-livy | server/src/main/scala/org/apache/livy/server/LivyServer.scala | Scala | apache-2.0 | 17,519 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush
import org.scalatest.FlatSpec
import org.scalatest.ShouldMatchers
import org.scalatest.prop.Checkers
import com.signalcollect.triplerush.QueryParticle._
import org.scalacheck.Arbitrary._
import org.scalacheck.Arbitrary
import org.scalacheck.Gen
import org.scalacheck.Gen._
class QueryParticleSpec extends FlatSpec with ShouldMatchers with Checkers with TestAnnouncements {
val maxId = 4
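// ScalaCheck generators producing random triple patterns with ids in [1, maxId].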
lazy val genTriple: Gen[TriplePattern] = for {
s <- Gen.choose(1, maxId)
p <- Gen.choose(1, maxId)
o <- Gen.choose(1, maxId)
} yield TriplePattern(s, p, o)
lazy val genSubjectPattern: Gen[TriplePattern] = for {
p <- Gen.choose(1, maxId)
o <- Gen.choose(1, maxId)
} yield TriplePattern(-1, p, o)
implicit lazy val arbTriple: Arbitrary[TriplePattern] = Arbitrary(genTriple)
"QueryParticle" should "correctly encode ids" in {
check(
(id: Int) => {
val qp = QueryParticle(
patterns = Seq(
TriplePattern(-1, 1, 2),
TriplePattern(-1, 3, -2)),
queryId = 1,
numberOfSelectVariables = 2)
qp.writeQueryId(id)
qp.queryId == id
},
minSuccessful(10))
}
it should "correctly encode tickets" in {
check(
(tickets: Long) => {
val qp = QueryParticle(
patterns = Seq(
TriplePattern(-1, 1, 2),
TriplePattern(-1, 3, -2)),
queryId = 1,
numberOfSelectVariables = 2)
qp.writeTickets(tickets)
qp.tickets == tickets
},
minSuccessful(10))
}
it should "correctly encode triple patterns" in {
check(
(a: TriplePattern, b: TriplePattern, c: TriplePattern) => {
val patterns = Array(a, b, c)
val qp = QueryParticle(
patterns = patterns,
queryId = 1,
numberOfSelectVariables = 3)
qp.patterns.toList == patterns.toList
},
minSuccessful(10))
}
}
| jacqueslk/triplerush-filter | src/test/scala/com/signalcollect/triplerush/QueryParticleSpec.scala | Scala | apache-2.0 | 2,656 |
package com.fuscus.seien.appli.output
import com.fuscus.seien.domain.entity.IssueID
import com.fuscus.seien.infra.core.UUID
import com.fuscus.seien.infra.vo.URI
import org.json4s._
/**
* Created by watawuwu on 15/08/09.
*/
// @todo move to infrastructure layer
object CustomSerializer {
import JsonDSL._
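// json4s custom serializers: each provides a `deserialize` partial function
// (JValue -> value object) and a `serialize` partial function (value object -> JValue).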
class URISerializer extends Serializer[URI] {
private val URIClass = classOf[URI]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), URI] = {
case (TypeInfo(URIClass, _), json) => json match {
case JString(id) => URI(id)
case x => throw new MappingException("Can't convert " + x + " to URI")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: URI => x.value
}
}
object URISerializer extends URISerializer
// @todo generics
class IssueIDSerializer extends Serializer[IssueID] {
private val UUIDClass = classOf[IssueID]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), IssueID] = {
case (TypeInfo(UUIDClass, _), json) => json match {
case JString(id) => IssueID(UUID(id))
case x => throw new MappingException("Can't convert " + x + " to IssueID")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: IssueID => x.value.toString
}
}
object IssueIDSerializer extends IssueIDSerializer
}
| watawuwu/seien-backend | modules/appli/app/com/fuscus/seien/appli/output/CustomSerializer.scala | Scala | mit | 1,454 |
/**
* Copyright (c) 2014-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.storage
// Java
import java.util.Properties
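// Amazon (reconstructed: the import block for the Kinesis connector classes
// referenced below was missing from this copy; these are their standard
// locations in the amazon-kinesis-connectors library)
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
import com.amazonaws.services.kinesis.connectors.{KinesisConnectorConfiguration, UnmodifiableBuffer}
import com.amazonaws.services.kinesis.connectors.impl.BasicMemoryBuffer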
// Scala
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
// Scalaz
import scalaz._
import Scalaz._
// json4s
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonDSL._
// Specs2
import org.specs2.mutable.Specification
import org.specs2.scalaz.ValidationMatchers
/**
* Tests the SnowplowElasticsearchEmitter
*/
class SnowplowElasticsearchEmitterSpec extends Specification with ValidationMatchers {
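// Reconstructed: `kcc` is used below but its definition was missing from this
// copy of the file; a minimal connector configuration is assumed here.
val kcc = new KinesisConnectorConfiguration(new Properties, new DefaultAWSCredentialsProviderChain)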
"The emit method" should {
"return all invalid records" in {
val eem = new SnowplowElasticsearchEmitter(Some(new StdouterrSink), new StdouterrSink)
val validInput: ValidatedRecord = "good" -> JObject(Nil).success
val invalidInput: ValidatedRecord = "bad" -> List("malformed event").failure
val input = List(validInput, invalidInput)
val bmb = new BasicMemoryBuffer[ValidatedRecord](kcc, input)
val ub = new UnmodifiableBuffer[ValidatedRecord](bmb)
val actual = eem.emit(ub)
actual must_== List(invalidInput).asJava
}
}
}
| TimothyKlim/snowplow | 4-storage/kafka-elasticsearch-sink/src/test/scala/com.snowplowanalytics.snowplow.storage.kinesis.elasticsearch/SnowplowElasticsearchEmitterSpec.scala | Scala | apache-2.0 | 1,840 |
/*
* Copyright 2011 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.springjdbc
package namedparam
import java.{ util => jutil, io => jio }
import java.sql.{ Types, ResultSet }
import scala.collection.{ immutable, mutable }
import scala.collection.JavaConverters._
import org.junit.{ Test, Before, After, Ignore }
import org.junit.runner.RunWith
import org.scalatest.{ Suite, BeforeAndAfterAll }
import org.scalatest.junit.JUnitRunner
import org.springframework.dao.DataIntegrityViolationException
import org.springframework.jdbc.datasource.embedded.{ EmbeddedDatabaseBuilder, EmbeddedDatabase, EmbeddedDatabaseType }
import org.springframework.jdbc.core.namedparam._
import org.springframework.jdbc.datasource.DataSourceTransactionManager
import eu.cdevreeze.springtx.TransactionTemplate
import JdbcTemplateUtils._
/**
* NamedParamJdbcTemplate test case.
*
* For the sample database, see http://pgfoundry.org/.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class NamedParamJdbcTemplateTest extends Suite with BeforeAndAfterAll {
@volatile private var db: EmbeddedDatabase = _
@volatile private var namedParamJdbcTemplate: NamedParamJdbcTemplate = _
@volatile private var txTemplate: TransactionTemplate = _
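// Spins up an in-memory HSQL database from schema.sql/data.sql and wraps its
// transaction manager in a TransactionTemplate so each test runs transactionally.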
override def beforeAll(): Unit = {
db = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL).addScript("schema.sql").addScript("data.sql").build()
namedParamJdbcTemplate = new NamedParamJdbcTemplate(db)
val txManager = new DataSourceTransactionManager(db)
txTemplate = new TransactionTemplate(txManager)
}
override def afterAll(): Unit = {
db.shutdown()
db = null
}
@Test def testUsingWrappedObject() {
val sql = "select name from city where id = ?"
val idDenBosch = 20
val action = newPreparedStatementCallback[String] { ps =>
ps.setInt(1, idDenBosch)
var rs: ResultSet = null
try {
rs = ps.executeQuery()
require(rs.next())
rs.getString(1)
} finally {
if (rs ne null) rs.close()
}
}
val denBosch: String =
txTemplate execute { status =>
namedParamJdbcTemplate.wrappedObject.getJdbcOperations.execute(sql, action)
}
expect("s-Hertogenbosch") {
denBosch
}
}
@Test def testQueryUsingTypedArgs() {
val sql = "select name from city where id = :id"
val idDenBosch = 20
val denBosch1: String =
txTemplate execute { status =>
val rowMapper = newRowMapper[String] { (rs: ResultSet, rowNum: Int) =>
rs.getString(1)
}
val typedArgs = TypedArgMap("id" -> TypedArg(int2Integer(idDenBosch), Types.INTEGER))
namedParamJdbcTemplate.query(sql, typedArgs, rowMapper).headOption.getOrElse(sys.error("Expected to find Den Bosch"))
}
expect("s-Hertogenbosch") {
denBosch1
}
val denBosch2: String =
txTemplate execute { status =>
val extractor = newResultSetExtractor[String] { (rs: ResultSet) =>
val hasNext = rs.next()
require(hasNext, "Expected non-empty ResultSet")
rs.getString(1)
}
val typedArgs = TypedArgMap("id" -> TypedArg(int2Integer(idDenBosch), Types.INTEGER))
namedParamJdbcTemplate.query(sql, typedArgs, extractor)
}
expect("s-Hertogenbosch") {
denBosch2
}
val denBosch3: String =
txTemplate execute { status =>
val results = mutable.ArrayBuffer[String]()
val handler = newRowCallbackHandler { (rs: ResultSet) =>
results += rs.getString(1)
}
val typedArgs = TypedArgMap("id" -> TypedArg(int2Integer(idDenBosch), Types.INTEGER))
namedParamJdbcTemplate.query(sql, typedArgs, handler)
results.headOption.getOrElse(sys.error("Expected to find Den Bosch"))
}
expect("s-Hertogenbosch") {
denBosch3
}
}
@Test def testQueryUsingNonTypedArgs() {
val sql = "select name from city where id = :id"
val idDenBosch = 20
val denBosch1: String =
txTemplate execute { status =>
val rowMapper = newRowMapper[String] { (rs: ResultSet, rowNum: Int) =>
rs.getString(1)
}
namedParamJdbcTemplate.query(sql, Map("id" -> int2Integer(idDenBosch)), rowMapper).headOption.getOrElse(sys.error("Expected to find Den Bosch"))
}
expect("s-Hertogenbosch") {
denBosch1
}
val denBosch2: String =
txTemplate execute { status =>
val extractor = newResultSetExtractor[String] { (rs: ResultSet) =>
val hasNext = rs.next()
require(hasNext, "Expected non-empty ResultSet")
rs.getString(1)
}
namedParamJdbcTemplate.query(sql, Map("id" -> int2Integer(idDenBosch)), extractor)
}
expect("s-Hertogenbosch") {
denBosch2
}
val denBosch3: String =
txTemplate execute { status =>
val results = mutable.ArrayBuffer[String]()
val handler = newRowCallbackHandler { (rs: ResultSet) =>
results += rs.getString(1)
}
namedParamJdbcTemplate.query(sql, Map("id" -> int2Integer(idDenBosch)), handler)
results.headOption.getOrElse(sys.error("Expected to find Den Bosch"))
}
expect("s-Hertogenbosch") {
denBosch3
}
}
@Test def testQueryForInt(): Unit = {
val dutchCityCount1 =
txTemplate execute { status =>
val countryCode = "NLD"
val sql = "select count(*) from city where countrycode = :countryCode"
namedParamJdbcTemplate.queryForInt(sql, Map("countryCode" -> countryCode))
}
expect(28) {
dutchCityCount1
}
val dutchCityCount2 =
txTemplate execute { status =>
val countryCode = "NLD"
val sql = "select count(*) from city where countrycode = :countryCode"
namedParamJdbcTemplate.queryForInt(sql, TypedArgMap("countryCode" -> TypedArg(countryCode, Types.VARCHAR)))
}
expect(28) {
dutchCityCount2
}
}
@Test def testQueryForLong(): Unit = {
val dutchCityCount1 =
txTemplate execute { status =>
val countryCode = "NLD"
val sql = "select count(*) from city where countrycode = :countryCode"
namedParamJdbcTemplate.queryForLong(sql, Map("countryCode" -> countryCode))
}
expect(28) {
dutchCityCount1
}
val dutchCityCount2 =
txTemplate execute { status =>
val countryCode = "NLD"
val sql = "select count(*) from city where countrycode = :countryCode"
namedParamJdbcTemplate.queryForLong(sql, TypedArgMap("countryCode" -> TypedArg(countryCode, Types.VARCHAR)))
}
expect(28) {
dutchCityCount2
}
}
@Test def testQueryForMap(): Unit = {
val idDenBosch = 20
val denBosch1: Map[String, AnyRef] =
txTemplate execute { status =>
val sql = "select * from city where id = :id"
namedParamJdbcTemplate.queryForMap(sql, Map("id" -> int2Integer(idDenBosch)))
}
expect("s-Hertogenbosch") {
denBosch1("NAME")
}
val denBosch2: Map[String, AnyRef] =
txTemplate execute { status =>
val sql = "select * from city where id = :id"
namedParamJdbcTemplate.queryForMap(sql, TypedArgMap("id" -> TypedArg(int2Integer(idDenBosch), Types.INTEGER)))
}
expect("s-Hertogenbosch") {
denBosch2("NAME")
}
}
@Test def testQueryForSeq(): Unit = {
val dutchCities: immutable.IndexedSeq[String] =
txTemplate execute { status =>
val countryCode = "NLD"
val sql = "select name from city where countrycode = :countryCode"
namedParamJdbcTemplate.queryForSeq(sql, Map("countryCode" -> countryCode), classOf[String])
}
assert(dutchCities.size > 20)
assert(Set("Rotterdam", "Enschede", "Eindhoven", "Utrecht", "Groningen").subsetOf(dutchCities.toSet))
var dutchCities2 = mutable.ArrayBuffer[String]()
txTemplate executeWithoutResult { status =>
val sql = "select name from city where countrycode = 'NLD'"
val result = namedParamJdbcTemplate.queryForSeq(sql, Map[String, AnyRef](), classOf[String])
dutchCities2 ++= result
}
expect(dutchCities.toSet) {
dutchCities2.toSet
}
}
@Test def testUpdate(): Unit = {
val idDenBosch = 20
val denBosch: String =
txTemplate execute { status =>
val sql = "select name from city where id = :id"
namedParamJdbcTemplate.queryForSeq(sql, Map("id" -> int2Integer(idDenBosch)), classOf[String]).headOption.getOrElse(sys.error("Expected to find 'Den Bosch'"))
}
expect("s-Hertogenbosch") {
denBosch
}
val denBoschSpelledCorrectly = """'s-Hertogenbosch"""
txTemplate executeWithoutResult { status =>
val sql = "update city set name = :name where id = :id"
namedParamJdbcTemplate.update(sql, Map("name" -> denBoschSpelledCorrectly, "id" -> int2Integer(idDenBosch)))
}
val denBosch2 =
txTemplate execute { status =>
val sql = "select name from city where id = :id"
namedParamJdbcTemplate.queryForSeq(sql, Map("id" -> int2Integer(idDenBosch)), classOf[String]).headOption.getOrElse(sys.error("Expected to find 'Den Bosch'"))
}
expect(denBoschSpelledCorrectly) {
denBosch2
}
txTemplate executeWithoutResult { status =>
val sql = "update city set name = :name where id = :id"
namedParamJdbcTemplate.update(sql, Map("name" -> denBosch, "id" -> int2Integer(idDenBosch)))
}
val denBosch3: String =
txTemplate execute { status =>
val sql = "select name from city where id = :id"
namedParamJdbcTemplate.queryForSeq(sql, Map("id" -> int2Integer(idDenBosch)), classOf[String]).headOption.getOrElse(sys.error("Expected to find 'Den Bosch'"))
}
expect("s-Hertogenbosch") {
denBosch3
}
}
}
| dvreeze/spring-jdbc-scala-utils | src/test/scala/eu/cdevreeze/springjdbc/namedparam/NamedParamJdbcTemplateTest.scala | Scala | apache-2.0 | 10,378 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.openid
import org.specs2.mutable.Specification
import scala.Predef._
import org.specs2.mock.Mockito
import org.mockito._
import play.api.mvc.Request
import play.api.http._
import play.api.http.Status._
import play.api.libs.openid.Errors.{ AUTH_ERROR, BAD_RESPONSE }
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import play.api.libs.ws.BodyWritable
import scala.concurrent.ExecutionContext.Implicits.global
class OpenIDSpec extends Specification with Mockito {
val claimedId = "http://example.com/openid?id=C123"
val identity = "http://example.com/openid?id=C123&id"
val defaultSigned = "op_endpoint,claimed_id,identity,return_to,response_nonce,assoc_handle"
val dur = Duration(10, TimeUnit.SECONDS)
// 9.1 Request parameters - http://openid.net/specs/openid-authentication-2_0.html#anchor27
def isValidOpenIDRequest(query: Params) = {
query.get("openid.mode") must_== Some(Seq("checkid_setup"))
query.get("openid.ns") must_== Some(Seq("http://specs.openid.net/auth/2.0"))
}
"OpenID" should {
"initiate discovery" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
openId.redirectURL("http://example.com", "http://foo.bar.com/openid")
there was one(ws.request).get()
}
"generate a valid redirectUrl" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val redirectUrl = Await.result(openId.redirectURL("http://example.com", "http://foo.bar.com/returnto"), dur)
val query = parseQueryString(redirectUrl)
isValidOpenIDRequest(query)
query.get("openid.return_to") must_== Some(Seq("http://foo.bar.com/returnto"))
query.get("openid.realm") must beNone
}
"generate a valid redirectUrl with a proper required extended attributes request" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val redirectUrl = Await.result(openId.redirectURL("http://example.com", "http://foo.bar.com/returnto",
axRequired = Seq("email" -> "http://schema.openid.net/contact/email")), dur)
val query = parseQueryString(redirectUrl)
isValidOpenIDRequest(query)
query.get("openid.ax.mode") must_== Some(Seq("fetch_request"))
query.get("openid.ns.ax") must_== Some(Seq("http://openid.net/srv/ax/1.0"))
query.get("openid.ax.required") must_== Some(Seq("email"))
query.get("openid.ax.type.email") must_== Some(Seq("http://schema.openid.net/contact/email"))
}
"generate a valid redirectUrl with a proper 'if_available' extended attributes request" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val redirectUrl = Await.result(openId.redirectURL("http://example.com", "http://foo.bar.com/returnto",
axOptional = Seq("email" -> "http://schema.openid.net/contact/email")), dur)
val query = parseQueryString(redirectUrl)
isValidOpenIDRequest(query)
query.get("openid.ax.mode") must_== Some(Seq("fetch_request"))
query.get("openid.ns.ax") must_== Some(Seq("http://openid.net/srv/ax/1.0"))
query.get("openid.ax.if_available") must_== Some(Seq("email"))
query.get("openid.ax.type.email") must_== Some(Seq("http://schema.openid.net/contact/email"))
}
"generate a valid redirectUrl with a proper 'if_available' AND required extended attributes request" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val redirectUrl = Await.result(openId.redirectURL("http://example.com", "http://foo.bar.com/returnto",
axRequired = Seq("first" -> "http://axschema.org/namePerson/first"),
axOptional = Seq("email" -> "http://schema.openid.net/contact/email")), dur)
val query = parseQueryString(redirectUrl)
isValidOpenIDRequest(query)
query.get("openid.ax.mode") must_== Some(Seq("fetch_request"))
query.get("openid.ns.ax") must_== Some(Seq("http://openid.net/srv/ax/1.0"))
query.get("openid.ax.required") must_== Some(Seq("first"))
query.get("openid.ax.type.first") must_== Some(Seq("http://axschema.org/namePerson/first"))
query.get("openid.ax.if_available") must_== Some(Seq("email"))
query.get("openid.ax.type.email") must_== Some(Seq("http://schema.openid.net/contact/email"))
}
"verify the response" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val responseQueryString = openIdResponse
val userInfo = Await.result(openId.verifiedId(setupMockRequest(responseQueryString)), dur)
"the claimedId must be present" in {
userInfo.id must be equalTo claimedId
}
val argument = ArgumentCaptor.forClass(classOf[Params])
"direct verification using a POST request was used" in {
there was one(ws.request).post(argument.capture())(any[BodyWritable[Params]])
val verificationQuery = argument.getValue
"openid.mode was set to check_authentication" in {
verificationQuery.get("openid.mode") must_== Some(Seq("check_authentication"))
}
"every query parameter apart from openid.mode is used in the verification request" in {
(verificationQuery - "openid.mode") forall { case (key, value) => responseQueryString.get(key) == Some(value) } must beTrue
}
}
}
// 11.2 If the Claimed Identifier was not previously discovered by the Relying Party
// (the "openid.identity" in the request was "http://specs.openid.net/auth/2.0/identifier_select" or a different Identifier,
// or if the OP is sending an unsolicited positive assertion), the Relying Party MUST perform discovery on the
// Claimed Identifier in the response to make sure that the OP is authorized to make assertions about the Claimed Identifier.
"verify the response using discovery on the claimed Identifier" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val spoofedEndpoint = "http://evilhackerendpoint.com"
val responseQueryString = openIdResponse - "openid.op_endpoint" + ("openid.op_endpoint" -> Seq(spoofedEndpoint))
Await.result(openId.verifiedId(setupMockRequest(responseQueryString)), dur)
"direct verification does not use the openid.op_endpoint that is part of the query string" in {
ws.urls contains (spoofedEndpoint) must beFalse
}
"the endpoint is resolved using discovery on the claimed Id" in {
ws.urls(0) must be equalTo claimedId
}
"use endpoint discovery and then direct verification" in {
got {
// Use discovery to resolve the endpoint
one(ws.request).get()
// Verify the response
one(ws.request).post(any[Params])(any[BodyWritable[Params]])
}
}
"use direct verification on the discovered endpoint" in {
ws.urls(1) must be equalTo "https://www.google.com/a/example.com/o8/ud?be=o8" // From the mock XRDS
}
}
"fail response verification if direct verification fails" in {
val ws = new WSMock
ws.response.status returns OK thenReturns OK
ws.response.header(HeaderNames.CONTENT_TYPE) returns Some("application/xrds+xml") thenReturns Some("text/plain")
ws.response.xml returns scala.xml.XML.loadString(readFixture("discovery/xrds/simple-op.xml"))
ws.response.body returns "is_valid:false\\n"
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
Await.result(openId.verifiedId(setupMockRequest()), dur) must throwA[AUTH_ERROR.type]
there was one(ws.request).post(any[Params])(any[BodyWritable[Params]])
}
"fail response verification if the response indicates an error" in {
val ws = new WSMock
ws.response.status returns OK thenReturns OK
ws.response.header(HeaderNames.CONTENT_TYPE) returns Some("application/xrds+xml") thenReturns Some("text/plain")
ws.response.xml returns scala.xml.XML.loadString(readFixture("discovery/xrds/simple-op.xml"))
ws.response.body returns "is_valid:false\\n"
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val errorResponse = (openIdResponse - "openid.mode") + ("openid.mode" -> Seq("error"))
Await.result(openId.verifiedId(setupMockRequest(errorResponse)), dur) must throwA[BAD_RESPONSE.type]
}
// OpenID 1.1 compatibility - 14.2.1
"verify an OpenID 1.1 response that is missing the \\"openid.op_endpoint\\" parameter" in {
val ws = createMockWithValidOpDiscoveryAndVerification
val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
val responseQueryString = (openIdResponse - "openid.op_endpoint")
val userInfo = Await.result(openId.verifiedId(setupMockRequest(responseQueryString)), dur)
"the claimedId must be present" in {
userInfo.id must be equalTo claimedId
}
"using discovery and direct verification" in {
got {
// Use discovery to resolve the endpoint
one(ws.request).get()
// Verify the response
one(ws.request).post(any[Params])(any[BodyWritable[Params]])
}
}
}
}
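// Mock WS client whose first response is a successful XRDS discovery document
// and whose second response is a positive direct-verification reply.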
def createMockWithValidOpDiscoveryAndVerification = {
val ws = new WSMock
ws.response.status returns OK thenReturns OK
ws.response.header(HeaderNames.CONTENT_TYPE) returns Some("application/xrds+xml") thenReturns Some("text/plain")
ws.response.xml returns scala.xml.XML.loadString(readFixture("discovery/xrds/simple-op.xml"))
ws.response.body returns "is_valid:true\\n" // http://openid.net/specs/openid-authentication-2_0.html#kvform
ws
}
def setupMockRequest(queryString: Params = openIdResponse) = {
val request = mock[Request[_]]
request.queryString returns queryString
request
}
def openIdResponse = createDefaultResponse(claimedId, identity, defaultSigned)
}
| Shenker93/playframework | framework/src/play-openid/src/test/scala/play/api/libs/openid/OpenIDSpec.scala | Scala | apache-2.0 | 10,325 |
package org.jetbrains.plugins.scala.lang.parameterInfo.functionParameterInfo
class FunctionParameterInfoSimpleTest extends FunctionParameterInfoTestBase {
override def getTestDataPath: String =
s"${super.getTestDataPath}simple/"
def testAnnotation() = doTest()
def testDefaultParameter() = doTest()
def testDefaultParameterFromSources() = doTest()
def testFromPositionalToNaming() = doTest()
def testGenericJavaLibrary() = doTest()
def testImplicitParameter() = doTest()
def testInfixExpression() = doTest()
def testInfixTuple() = doTest()
def testInfixUnit() = doTest()
def testJavaLibrary() = doTest()
def testLocal() = doTest()
def testNothingExprType() = doTest()
def testPositionalAfterNamed() = doTest()
def testScalaLibrary() = doTest()
def testSimple() = doTest()
def testSyntheticParameter() = doTest()
def testTypeRefinement() = doTest()
def testAliasedMethod() = doTest()
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/lang/parameterInfo/functionParameterInfo/FunctionParameterInfoSimpleTest.scala | Scala | apache-2.0 | 948 |
package controllers
import java.io.File
import javax.inject.{Inject, Singleton}
import play.api.{Application, Logger}
import services._
import util.{AppSettings, FileUploadAsset}
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Controller for dynamic resources served from the application data directory.
*/
@Singleton
class AppResourceController @Inject() (val appSettings: AppSettings,
val application: Application,
val userService: UserService,
val sessionService: SessionService,
val permissionService: PermissionService,
postService: PostService,
forumService: ForumService,
threadService: ThreadService,
categoryService: ForumCategoryService) extends RestController with FileUploadAsset {
private[this] val RES_FAVICON = "favicon.ico"
private[this] val RES_LOGO = "logo.png"
def favicon = handleFromAppResourcesOrDefault(RES_FAVICON)
def logo = handleFromAppResourcesOrDefault(RES_LOGO)
def logoSmall = handleFromAppResourcesOrDefault(RES_LOGO) // TODO: Lower-Res Logo
def downloadAvatar(id: String) = OptionalSessionRestAction.async {
implicit request =>
// TODO: Check for permissions
userService.getUser(id).toFuture.map {
_.filter { _.avatar.isDefined }.map {
userOpt =>
Logger.info(s"File: appdata/avatars/${userOpt.avatar.get}")
new File(s"appdata/avatars/${userOpt.avatar.get}")
}.filter { _.exists }.fold {
NotFound("not found")
} {
file => Ok.sendFile(content = file, inline = true)
}
}
}
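// Streams an upload attached to a post, but only after resolving the
// post -> thread -> forum -> category chain and checking access permissions.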
def downloadPostUpload(id: String, filename: String) = OptionalSessionRestAction.async {
implicit request =>
val req = for {
post <- postService.getPost(id)
thread <- threadService.getThread(post.thread)
forum <- forumService.getForum(thread.forum)
category <- categoryService.getCategory(forum.category)
} yield {
val permissionResult = thread.checkAccess(category, forum)
if (permissionResult) {
val postId = post._id
val uploadOpt = post.uploads.find(_.filename == filename)
val fileOpt = uploadOpt map {
upload =>
Logger.info(s"appdata/uploads/$postId/${upload.source}")
new File(s"appdata/uploads/$postId/${upload.source}")
} filter {
_.exists
}
fileOpt.fold {
NotFound("not found")
} {
file =>
Ok.sendFile(content = file, fileName = _ => filename)
}
} else {
Forbidden("forbidden")
}
}
req.flatten(NotFound("not found"))
}
}
| metaxmx/FridayNightBeer | app/controllers/AppResourceController.scala | Scala | apache-2.0 | 3,054 |
/*
* Sonar Scoverage Plugin
* Copyright (C) 2013 Rado Buransky
* [email protected]
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.buransky.plugins.scoverage.sensor
import java.io.File
import com.buransky.plugins.scoverage.language.Scala
import com.buransky.plugins.scoverage.measure.ScalaMetrics
import com.buransky.plugins.scoverage.pathcleaner.{BruteForceSequenceMatcher, PathSanitizer}
import com.buransky.plugins.scoverage.util.LogUtil
import com.buransky.plugins.scoverage.xml.XmlScoverageReportParser
import com.buransky.plugins.scoverage.{CoveredStatement, DirectoryStatementCoverage, FileStatementCoverage, _}
import org.sonar.api.batch.fs.{FileSystem, InputFile, InputPath}
import org.sonar.api.batch.{CoverageExtension, Sensor, SensorContext}
import org.sonar.api.config.Settings
import org.sonar.api.measures.{CoverageMeasuresBuilder, Measure}
import org.sonar.api.resources.{Project, Resource}
import org.sonar.api.scan.filesystem.PathResolver
import org.sonar.api.utils.log.Loggers
import scala.collection.JavaConversions._
/**
* Main sensor for importing Scoverage report to Sonar.
*
* @author Rado Buransky
*/
class ScoverageSensor(settings: Settings, pathResolver: PathResolver, fileSystem: FileSystem)
extends Sensor with CoverageExtension {
private val log = Loggers.get(classOf[ScoverageSensor])
protected val SCOVERAGE_REPORT_PATH_PROPERTY = "sonar.scoverage.reportPath"
protected lazy val scoverageReportParser: ScoverageReportParser = XmlScoverageReportParser()
override def shouldExecuteOnProject(project: Project): Boolean = fileSystem.languages().contains(Scala.key)
override def analyse(project: Project, context: SensorContext) {
scoverageReportPath match {
case Some(reportPath) =>
// Single-module project
val srcOption = Option(settings.getString("sonar.sources"))
val sonarSources = srcOption match {
case Some(src) => src
case None => {
log.warn(s"could not find settings key sonar.sources assuming src/main/scala.")
"src/main/scala"
}
}
val pathSanitizer = createPathSanitizer(sonarSources)
processProject(scoverageReportParser.parse(reportPath, pathSanitizer), project, context, sonarSources)
case None =>
// Multi-module project has report path set for each module individually
analyseMultiModuleProject(project, context)
}
}
override val toString = getClass.getSimpleName
protected def createPathSanitizer(sonarSources: String): PathSanitizer
= new BruteForceSequenceMatcher(fileSystem.baseDir(), sonarSources)
private lazy val scoverageReportPath: Option[String] = {
settings.getString(SCOVERAGE_REPORT_PATH_PROPERTY) match {
case null => None
case path: String =>
pathResolver.relativeFile(fileSystem.baseDir, path) match {
case report: java.io.File if !report.exists || !report.isFile =>
log.error(LogUtil.f("Report not found at {}"), report)
None
case report: java.io.File => Some(report.getAbsolutePath)
}
}
}
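// In a multi-module project each module carries its own report path; the
// overall statement coverage is recomputed here from per-module counts.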
private def analyseMultiModuleProject(project: Project, context: SensorContext) {
project.isModule match {
case true => log.warn(LogUtil.f("Report path not set for " + project.name + " module! [" +
project.name + "." + SCOVERAGE_REPORT_PATH_PROPERTY + "]"))
case _ =>
// Compute overall statement coverage from submodules
val totalStatementCount = project.getModules.map(analyseStatementCountForModule(_, context)).sum
val coveredStatementCount = project.getModules.map(analyseCoveredStatementCountForModule(_, context)).sum
if (totalStatementCount > 0) {
// Convert to percentage
val overall = (coveredStatementCount.toDouble / totalStatementCount.toDouble) * 100.0
// Set overall statement coverage
context.saveMeasure(project, createStatementCoverage(overall))
log.info(LogUtil.f("Overall statement coverage is " + ("%1.2f" format overall)))
}
}
}
private def analyseCoveredStatementCountForModule(module: Project, context: SensorContext): Long = {
// Aggregate modules
context.getMeasure(module, ScalaMetrics.coveredStatements) match {
case null =>
log.debug(LogUtil.f("Module has no statement coverage. [" + module.name + "]"))
0
case moduleCoveredStatementCount: Measure[_] =>
log.debug(LogUtil.f("Covered statement count for " + module.name + " module. [" +
moduleCoveredStatementCount.getValue + "]"))
moduleCoveredStatementCount.getValue.toLong
}
}
private def analyseStatementCountForModule(module: Project, context: SensorContext): Long = {
// Aggregate modules
context.getMeasure(module, ScalaMetrics.totalStatements) match {
case null =>
log.debug(LogUtil.f("Module has no number of statements. [" + module.name + "]"))
0
case moduleStatementCount: Measure[_] =>
log.debug(LogUtil.f("Statement count for " + module.name + " module. [" +
moduleStatementCount.getValue + "]"))
moduleStatementCount.getValue.toLong
}
}
private def processProject(projectCoverage: ProjectStatementCoverage, project: Project, context: SensorContext, sonarSources: String) {
// Save measures
saveMeasures(context, project, projectCoverage)
log.info(LogUtil.f("Statement coverage for " + project.getKey + " is " + ("%1.2f" format projectCoverage.rate)))
// Process children
processChildren(projectCoverage.children, context, sonarSources)
}
private def processDirectory(directoryCoverage: DirectoryStatementCoverage, context: SensorContext, parentDirectory: String) {
// save measures if any
if (directoryCoverage.statementCount > 0) {
val path = appendFilePath(parentDirectory, directoryCoverage.name)
getResource(path, context, false) match {
case Some(srcDir) => {
// Save directory measures
saveMeasures(context, srcDir, directoryCoverage)
}
case None =>
}
}
// Process children
processChildren(directoryCoverage.children, context, appendFilePath(parentDirectory, directoryCoverage.name))
}
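// Saves per-file measures plus line-level coverage, which SonarQube needs
// for source code highlighting.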
private def processFile(fileCoverage: FileStatementCoverage, context: SensorContext, directory: String) {
val path = appendFilePath(directory, fileCoverage.name)
getResource(path, context, true) match {
case Some(scalaSourceFile) => {
// Save measures
saveMeasures(context, scalaSourceFile, fileCoverage)
// Save line coverage. This is needed just for source code highlighting.
saveLineCoverage(fileCoverage.statements, scalaSourceFile, context)
}
case None =>
}
}
private def getResource(path: String, context: SensorContext, isFile: Boolean): Option[Resource] = {
val inputOption: Option[InputPath] = if (isFile) {
val p = fileSystem.predicates()
val pathPredicate = if (new File(path).isAbsolute) p.hasAbsolutePath(path) else p.hasRelativePath(path)
Option(fileSystem.inputFile(p.and(
pathPredicate,
p.hasLanguage(Scala.key),
p.hasType(InputFile.Type.MAIN))))
} else {
Option(fileSystem.inputDir(pathResolver.relativeFile(fileSystem.baseDir(), path)))
}
inputOption match {
case Some(path: InputPath) =>
Some(context.getResource(path))
case None => {
log.warn(s"File or directory not found in file system! ${path}")
None
}
}
}
private def saveMeasures(context: SensorContext, resource: Resource, statementCoverage: StatementCoverage) {
context.saveMeasure(resource, createStatementCoverage(statementCoverage.rate))
context.saveMeasure(resource, createStatementCount(statementCoverage.statementCount))
context.saveMeasure(resource, createCoveredStatementCount(statementCoverage.coveredStatementsCount))
log.debug(LogUtil.f("Save measures [" + statementCoverage.rate + ", " + statementCoverage.statementCount +
", " + statementCoverage.coveredStatementsCount + ", " + resource.getKey + "]"))
}
private def saveLineCoverage(coveredStatements: Iterable[CoveredStatement], resource: Resource,
context: SensorContext) {
// Convert statements to lines
val coveredLines = StatementCoverage.statementCoverageToLineCoverage(coveredStatements)
// Set line hits
val coverage = CoverageMeasuresBuilder.create()
coveredLines.foreach { coveredLine =>
coverage.setHits(coveredLine.line, coveredLine.hitCount)
}
// Save measures
coverage.createMeasures().toList.foreach(context.saveMeasure(resource, _))
}
private def processChildren(children: Iterable[StatementCoverage], context: SensorContext, directory: String) {
children.foreach(processChild(_, context, directory))
}
private def processChild(dirOrFile: StatementCoverage, context: SensorContext, directory: String) {
dirOrFile match {
case dir: DirectoryStatementCoverage => processDirectory(dir, context, directory)
case file: FileStatementCoverage => processFile(file, context, directory)
case _ => throw new IllegalStateException("Not a file or directory coverage! [" +
dirOrFile.getClass.getName + "]")
}
}
private def createStatementCoverage[T <: Serializable](rate: Double): Measure[T] =
new Measure[T](ScalaMetrics.statementCoverage, rate)
private def createStatementCount[T <: Serializable](statements: Int): Measure[T] =
new Measure(ScalaMetrics.totalStatements, statements.toDouble, 0)
private def createCoveredStatementCount[T <: Serializable](coveredStatements: Int): Measure[T] =
new Measure(ScalaMetrics.coveredStatements, coveredStatements.toDouble, 0)
private def appendFilePath(src: String, name: String) = {
val result = src match {
case java.io.File.separator => java.io.File.separator
case empty if empty.isEmpty => ""
case other => other + java.io.File.separator
}
result + name
}
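  // Illustrative behavior of appendFilePath (inputs are hypothetical):
  //   appendFilePath("src/main", "Foo.scala") == "src/main" + File.separator + "Foo.scala"
  //   appendFilePath("", "Foo.scala")         == "Foo.scala"
  //   appendFilePath(File.separator, "src")   == File.separator + "src"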
}
| Sagacify/sonar-scala | src/main/scala/com/buransky/plugins/scoverage/sensor/ScoverageSensor.scala | Scala | lgpl-3.0 | 10,840 |
/**
* Copyright (C) 2015-2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.api.sword2
import java.io.{File, IOException, PrintWriter, StringWriter}
import org.apache.commons.configuration.PropertiesConfiguration
import org.joda.time.{DateTime, DateTimeZone}
import org.slf4j.LoggerFactory
import scala.util.Try
object DepositProperties {
val log = LoggerFactory.getLogger(getClass)
case class State(label: String, description: String, timeStamp: String)
def set(id: String, stateLabel: String, stateDescription: String, userId: Option[String] = None, lookInTempFirst: Boolean = false, throwable: Throwable = null)(implicit settings: Settings): Try[Unit] = Try {
    val depositDir = new File(
      if (lookInTempFirst) settings.tempDir else settings.depositRootDir,
      id)
val props = readProperties(new File(depositDir, "deposit.properties"))
props.setProperty("state.label", stateLabel)
props.setProperty("state.description",
s"""
|$stateDescription
|${if(throwable != null) throwable.getMessage else ""}
""".stripMargin.trim)
userId.foreach(uid => props.setProperty("depositor.userId", uid))
props.save()
}
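  // For illustration only: a call such as set(id, "SUBMITTED", "Deposit is valid")
  // leaves a deposit.properties along these lines (values are hypothetical):
  //   state.label = SUBMITTED
  //   state.description = Deposit is valid
  //   depositor.userId = user001   (only written when userId is given)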
def getState(id: String)(implicit settings: Settings): Try[State] = {
log.debug(s"[$id] Trying to retrieve state")
readState(id, new File(settings.tempDir, s"$id/deposit.properties")).recoverWith {
case f: IOException => readState(id, new File(settings.depositRootDir, s"$id/deposit.properties"))
}
}
private def readState(id: String, f: File): Try[State] = Try {
    log.debug(s"[$id] Trying to retrieve state from $f")
    if(!f.exists()) throw new IOException(s"$f does not exist")
    val s = readProperties(f)
val state = Option(s.getString("state.label")).getOrElse("")
val userId = Option(s.getString("depositor.userId")).getOrElse("")
if(state.isEmpty || userId.isEmpty) {
if (state.isEmpty) log.error(s"[$id] State not present in $f")
if (userId.isEmpty) log.error(s"[$id] User ID not present in $f")
State("FAILED", "There occured unexpected failure in deposit", new DateTime(s.getFile.lastModified()).withZone(DateTimeZone.UTC).toString)
}
else
State(state, s.getString("state.description"), new DateTime(s.getFile.lastModified()).withZone(DateTimeZone.UTC).toString)
}
private def stackTraceToString(t: Throwable): String = {
val sw = new StringWriter()
val pw = new PrintWriter(sw)
t.printStackTrace(pw)
pw.flush()
sw.toString
}
private def readProperties(f: File) = {
val ps = new PropertiesConfiguration()
ps.setDelimiterParsingDisabled(true)
if(f.exists) ps.load(f)
ps.setFile(f)
ps
}
}
| vesaakerman/easy-sword2 | src/main/scala/nl/knaw/dans/api/sword2/DepositProperties.scala | Scala | apache-2.0 | 3,324 |
package mesosphere.marathon.event.http
import java.util.UUID
import javax.inject.{ Inject, Named }
import javax.servlet.http.{ Cookie, HttpServletRequest, HttpServletResponse }
import akka.actor.ActorRef
import mesosphere.marathon.api.RequestFacade
import mesosphere.marathon.event.http.HttpEventStreamActor._
import mesosphere.marathon.plugin.auth._
import mesosphere.marathon.plugin.http.HttpResponse
import mesosphere.marathon.{ MarathonConf, ModuleNames }
import org.eclipse.jetty.servlets.EventSource.Emitter
import org.eclipse.jetty.servlets.{ EventSource, EventSourceServlet }
import scala.concurrent.{ Await, blocking }
/**
* The Stream handle implementation for SSE.
*
* @param request the initial http request.
* @param emitter the emitter to emit data
*/
class HttpEventSSEHandle(request: HttpServletRequest, emitter: Emitter) extends HttpEventStreamHandle {
lazy val id: String = UUID.randomUUID().toString
override def remoteAddress: String = request.getRemoteAddr
override def close(): Unit = emitter.close()
override def sendEvent(event: String, message: String): Unit = blocking(emitter.event(event, message))
override def toString: String = s"HttpEventSSEHandle($id on $remoteAddress)"
}
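// For reference: Emitter#event(name, data) used above writes a standard SSE frame,
// i.e. (illustrative):
//   event: <name>
//   data: <data>
//   <blank line>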
/**
* Handle a server side event client stream by delegating events to the stream actor.
*/
class HttpEventStreamServlet @Inject() (
@Named(ModuleNames.HTTP_EVENT_STREAM) streamActor: ActorRef,
conf: MarathonConf,
val authenticator: Authenticator,
val authorizer: Authorizer)
extends EventSourceServlet {
override def doGet(request: HttpServletRequest, response: HttpServletResponse): Unit = {
val requestFacade = new RequestFacade(request)
val maybeIdentity = Await.result(authenticator.authenticate(requestFacade), conf.zkTimeoutDuration)
def withResponseFacade(fn: HttpResponse => Unit): Unit = {
val facade = new HttpResponse {
override def body(mediaType: String, bytes: Array[Byte]): Unit = {
response.setHeader("Content-Type", mediaType)
response.getWriter.write(new String(bytes))
}
override def sendRedirect(url: String): Unit = {
response.sendRedirect(url)
}
override def header(header: String, value: String): Unit = {
response.addHeader(header, value)
}
override def cookie(name: String, value: String, maxAge: Int, secure: Boolean): Unit = {
val cookie = new Cookie(name, value)
cookie.setMaxAge(maxAge)
cookie.setSecure(secure)
response.addCookie(cookie)
}
override def status(code: Int): Unit = {
response.setStatus(code)
}
}
fn(facade)
}
def isAuthorized(identity: Identity): Boolean = {
authorizer.isAuthorized(identity, ViewResource, AuthorizedResource.Events)
}
maybeIdentity match {
case Some(identity) if isAuthorized(identity) =>
super.doGet(request, response)
case Some(identity) =>
withResponseFacade(authorizer.handleNotAuthorized(identity, _))
case None =>
withResponseFacade(authenticator.handleNotAuthenticated(requestFacade, _))
}
}
override def newEventSource(request: HttpServletRequest): EventSource = new EventSource {
@volatile private var handler: Option[HttpEventSSEHandle] = None
override def onOpen(emitter: Emitter): Unit = {
val handle = new HttpEventSSEHandle(request, emitter)
this.handler = Some(handle)
streamActor ! HttpEventStreamConnectionOpen(handle)
}
override def onClose(): Unit = {
handler.foreach(streamActor ! HttpEventStreamConnectionClosed(_))
handler = None
}
}
}
| yp-engineering/marathon | src/main/scala/mesosphere/marathon/event/http/HttpEventStreamServlet.scala | Scala | apache-2.0 | 3,707 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.magic.dependencies
import com.ibm.spark.magic.Magic
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
trait IncludeSparkContext {
this: Magic =>
private var _sparkContext: SparkContext = _
private var _sqlContext: SQLContext = _
def sparkContext: SparkContext = _sparkContext
def sqlContext: SQLContext = _sqlContext
def sparkContext_=(newSparkContext: SparkContext) =
_sparkContext = newSparkContext
def sqlContext_=(newSqlContext: SQLContext) =
_sqlContext = newSqlContext
}
| slowenthal/spark-kernel | kernel-api/src/main/scala/com/ibm/spark/magic/dependencies/IncludeSparkContext.scala | Scala | apache-2.0 | 1,184 |
package com.pwootage.sasm.test.bitint
/*
* Copyright (c) 2014 Pwootage
*
* This file is part of SASM.
*
* SASM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* SASM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with SASM. If not, see <http://www.gnu.org/licenses/>.
*/
import org.scalatest._
/**
* Base class for testing multiple BitInt implementations
*/
abstract class BitIntSpec extends FlatSpec with Matchers with
OptionValues with Inside with Inspectors {
}
| Pwootage/SASM | src/test/scala/com/pwootage/sasm/test/bitint/BitIntSpec.scala | Scala | gpl-3.0 | 934 |
package service
import model.Profile._
import profile.simple._
import model.{Repository, Account, Collaborator}
import util.JGitUtil
trait RepositoryService { self: AccountService =>
import RepositoryService._
/**
* Creates a new repository.
*
* @param repositoryName the repository name
* @param userName the user name of the repository owner
* @param description the repository description
   * @param isPrivate whether the repository is private (true for private, false for public)
* @param originRepositoryName specify for the forked repository. (default is None)
* @param originUserName specify for the forked repository. (default is None)
*/
def createRepository(repositoryName: String, userName: String, description: Option[String], isPrivate: Boolean,
originRepositoryName: Option[String] = None, originUserName: Option[String] = None,
parentRepositoryName: Option[String] = None, parentUserName: Option[String] = None)
(implicit s: Session): Unit = {
Repositories insert
Repository(
userName = userName,
repositoryName = repositoryName,
isPrivate = isPrivate,
description = description,
defaultBranch = "master",
registeredDate = currentDate,
updatedDate = currentDate,
lastActivityDate = currentDate,
originUserName = originUserName,
originRepositoryName = originRepositoryName,
parentUserName = parentUserName,
parentRepositoryName = parentRepositoryName)
IssueId insert (userName, repositoryName, 0)
}
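  /**
   * Renames a repository and moves all of its related records (issues, pull requests, labels,
   * comments, milestones, activities, web hooks and collaborators) to the new owner and name,
   * rewriting cross references and activity messages along the way.
   */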
def renameRepository(oldUserName: String, oldRepositoryName: String, newUserName: String, newRepositoryName: String)
(implicit s: Session): Unit = {
getAccountByUserName(newUserName).foreach { account =>
(Repositories filter { t => t.byRepository(oldUserName, oldRepositoryName) } firstOption).map { repository =>
Repositories insert repository.copy(userName = newUserName, repositoryName = newRepositoryName)
val webHooks = WebHooks .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val milestones = Milestones .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val issueId = IssueId .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val issues = Issues .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val pullRequests = PullRequests .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val labels = Labels .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val issueComments = IssueComments.filter(_.byRepository(oldUserName, oldRepositoryName)).list
val issueLabels = IssueLabels .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val activities = Activities .filter(_.byRepository(oldUserName, oldRepositoryName)).list
val collaborators = Collaborators.filter(_.byRepository(oldUserName, oldRepositoryName)).list
Repositories.filter { t =>
(t.originUserName === oldUserName.bind) && (t.originRepositoryName === oldRepositoryName.bind)
}.map { t => t.originUserName -> t.originRepositoryName }.update(newUserName, newRepositoryName)
        Repositories.filter { t =>
          (t.parentUserName === oldUserName.bind) && (t.parentRepositoryName === oldRepositoryName.bind)
        }.map { t => t.parentUserName -> t.parentRepositoryName }.update(newUserName, newRepositoryName)
PullRequests.filter { t =>
t.requestRepositoryName === oldRepositoryName.bind
}.map { t => t.requestUserName -> t.requestRepositoryName }.update(newUserName, newRepositoryName)
deleteRepository(oldUserName, oldRepositoryName)
WebHooks .insertAll(webHooks .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
Milestones .insertAll(milestones .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
IssueId .insertAll(issueId .map(_.copy(_1 = newUserName, _2 = newRepositoryName)) :_*)
val newMilestones = Milestones.filter(_.byRepository(newUserName, newRepositoryName)).list
Issues.insertAll(issues.map { x => x.copy(
userName = newUserName,
repositoryName = newRepositoryName,
milestoneId = x.milestoneId.map { id =>
newMilestones.find(_.title == milestones.find(_.milestoneId == id).get.title).get.milestoneId
}
)} :_*)
PullRequests .insertAll(pullRequests .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
IssueComments .insertAll(issueComments .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
Labels .insertAll(labels .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
IssueLabels .insertAll(issueLabels .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
Activities .insertAll(activities .map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
if(account.isGroupAccount){
Collaborators.insertAll(getGroupMembers(newUserName).map(m => Collaborator(newUserName, newRepositoryName, m.userName)) :_*)
} else {
Collaborators.insertAll(collaborators.map(_.copy(userName = newUserName, repositoryName = newRepositoryName)) :_*)
}
// Update activity messages
val updateActivities = Activities.filter { t =>
(t.message like s"%:${oldUserName}/${oldRepositoryName}]%") ||
(t.message like s"%:${oldUserName}/${oldRepositoryName}#%")
}.map { t => t.activityId -> t.message }.list
updateActivities.foreach { case (activityId, message) =>
Activities.filter(_.activityId === activityId.bind).map(_.message).update(
message
.replace(s"[repo:${oldUserName}/${oldRepositoryName}]" ,s"[repo:${newUserName}/${newRepositoryName}]")
.replace(s"[branch:${oldUserName}/${oldRepositoryName}#" ,s"[branch:${newUserName}/${newRepositoryName}#")
.replace(s"[tag:${oldUserName}/${oldRepositoryName}#" ,s"[tag:${newUserName}/${newRepositoryName}#")
.replace(s"[pullreq:${oldUserName}/${oldRepositoryName}#",s"[pullreq:${newUserName}/${newRepositoryName}#")
.replace(s"[issue:${oldUserName}/${oldRepositoryName}#" ,s"[issue:${newUserName}/${newRepositoryName}#")
)
}
}
}
}
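  /**
   * Deletes a repository and all of its related records.
   */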
def deleteRepository(userName: String, repositoryName: String)(implicit s: Session): Unit = {
Activities .filter(_.byRepository(userName, repositoryName)).delete
Collaborators .filter(_.byRepository(userName, repositoryName)).delete
IssueLabels .filter(_.byRepository(userName, repositoryName)).delete
Labels .filter(_.byRepository(userName, repositoryName)).delete
IssueComments .filter(_.byRepository(userName, repositoryName)).delete
PullRequests .filter(_.byRepository(userName, repositoryName)).delete
Issues .filter(_.byRepository(userName, repositoryName)).delete
IssueId .filter(_.byRepository(userName, repositoryName)).delete
Milestones .filter(_.byRepository(userName, repositoryName)).delete
WebHooks .filter(_.byRepository(userName, repositoryName)).delete
Repositories .filter(_.byRepository(userName, repositoryName)).delete
}
/**
* Returns the repository names of the specified user.
*
* @param userName the user name of repository owner
* @return the list of repository names
*/
def getRepositoryNamesOfUser(userName: String)(implicit s: Session): List[String] =
Repositories filter(_.userName === userName.bind) map (_.repositoryName) list
/**
* Returns the specified repository information.
*
* @param userName the user name of the repository owner
* @param repositoryName the repository name
* @param baseUrl the base url of this application
* @return the repository information
*/
def getRepository(userName: String, repositoryName: String, baseUrl: String)(implicit s: Session): Option[RepositoryInfo] = {
(Repositories filter { t => t.byRepository(userName, repositoryName) } firstOption) map { repository =>
// for getting issue count and pull request count
val issues = Issues.filter { t =>
t.byRepository(repository.userName, repository.repositoryName) && (t.closed === false.bind)
}.map(_.pullRequest).list
new RepositoryInfo(
JGitUtil.getRepositoryInfo(repository.userName, repository.repositoryName, baseUrl),
repository,
issues.size,
issues.filter(_ == true).size,
getForkedCount(
repository.originUserName.getOrElse(repository.userName),
repository.originRepositoryName.getOrElse(repository.repositoryName)
),
getRepositoryManagers(repository.userName))
}
}
/**
   * Returns the repositories, excluding private repositories that the user has no access right to.
   * Includes public repositories, the user's own private repositories, and private repositories
   * on which the user is a collaborator.
   *
   * @param userName the user name of the collaborator
   * @return the list of (owner, repository name) pairs
*/
def getAllRepositories(userName: String)(implicit s: Session): List[(String, String)] = {
Repositories.filter { t1 =>
(t1.isPrivate === false.bind) ||
(t1.userName === userName.bind) ||
(Collaborators.filter { t2 => t2.byRepository(t1.userName, t1.repositoryName) && (t2.collaboratorName === userName.bind)} exists)
}.sortBy(_.lastActivityDate desc).map{ t =>
(t.userName, t.repositoryName)
}.list
}
def getUserRepositories(userName: String, baseUrl: String, withoutPhysicalInfo: Boolean = false)
(implicit s: Session): List[RepositoryInfo] = {
Repositories.filter { t1 =>
(t1.userName === userName.bind) ||
(Collaborators.filter { t2 => t2.byRepository(t1.userName, t1.repositoryName) && (t2.collaboratorName === userName.bind)} exists)
}.sortBy(_.lastActivityDate desc).list.map{ repository =>
new RepositoryInfo(
if(withoutPhysicalInfo){
new JGitUtil.RepositoryInfo(repository.userName, repository.repositoryName, baseUrl)
} else {
JGitUtil.getRepositoryInfo(repository.userName, repository.repositoryName, baseUrl)
},
repository,
getForkedCount(
repository.originUserName.getOrElse(repository.userName),
repository.originRepositoryName.getOrElse(repository.repositoryName)
),
getRepositoryManagers(repository.userName))
}
}
/**
* Returns the list of visible repositories for the specified user.
* If repositoryUserName is given then filters results by repository owner.
*
* @param loginAccount the logged in account
* @param baseUrl the base url of this application
   * @param repositoryUserName the repository owner (if None then all repositories visible to the logged-in user are returned)
   * @param withoutPhysicalInfo if true then the result does not include physical repository information such as commit count,
   *                            branches and tags
   * @return the repository information, sorted in descending order of lastActivityDate
*/
def getVisibleRepositories(loginAccount: Option[Account], baseUrl: String, repositoryUserName: Option[String] = None,
withoutPhysicalInfo: Boolean = false)
(implicit s: Session): List[RepositoryInfo] = {
(loginAccount match {
// for Administrators
case Some(x) if(x.isAdmin) => Repositories
// for Normal Users
case Some(x) if(!x.isAdmin) =>
Repositories filter { t => (t.isPrivate === false.bind) || (t.userName === x.userName) ||
(Collaborators.filter { t2 => t2.byRepository(t.userName, t.repositoryName) && (t2.collaboratorName === x.userName.bind)} exists)
}
// for Guests
case None => Repositories filter(_.isPrivate === false.bind)
}).filter { t =>
repositoryUserName.map { userName => t.userName === userName.bind } getOrElse LiteralColumn(true)
}.sortBy(_.lastActivityDate desc).list.map{ repository =>
new RepositoryInfo(
if(withoutPhysicalInfo){
new JGitUtil.RepositoryInfo(repository.userName, repository.repositoryName, baseUrl)
} else {
JGitUtil.getRepositoryInfo(repository.userName, repository.repositoryName, baseUrl)
},
repository,
getForkedCount(
repository.originUserName.getOrElse(repository.userName),
repository.originRepositoryName.getOrElse(repository.repositoryName)
),
getRepositoryManagers(repository.userName))
}
}
private def getRepositoryManagers(userName: String)(implicit s: Session): Seq[String] =
if(getAccountByUserName(userName).exists(_.isGroupAccount)){
getGroupMembers(userName).collect { case x if(x.isManager) => x.userName }
} else {
Seq(userName)
}
/**
* Updates the last activity date of the repository.
*/
def updateLastActivityDate(userName: String, repositoryName: String)(implicit s: Session): Unit =
Repositories.filter(_.byRepository(userName, repositoryName)).map(_.lastActivityDate).update(currentDate)
/**
* Save repository options.
*/
def saveRepositoryOptions(userName: String, repositoryName: String,
description: Option[String], defaultBranch: String, isPrivate: Boolean)(implicit s: Session): Unit =
Repositories.filter(_.byRepository(userName, repositoryName))
.map { r => (r.description.?, r.defaultBranch, r.isPrivate, r.updatedDate) }
.update (description, defaultBranch, isPrivate, currentDate)
/**
* Add collaborator to the repository.
*
* @param userName the user name of the repository owner
* @param repositoryName the repository name
* @param collaboratorName the collaborator name
*/
def addCollaborator(userName: String, repositoryName: String, collaboratorName: String)(implicit s: Session): Unit =
Collaborators insert Collaborator(userName, repositoryName, collaboratorName)
/**
* Remove collaborator from the repository.
*
* @param userName the user name of the repository owner
* @param repositoryName the repository name
* @param collaboratorName the collaborator name
*/
def removeCollaborator(userName: String, repositoryName: String, collaboratorName: String)(implicit s: Session): Unit =
Collaborators.filter(_.byPrimaryKey(userName, repositoryName, collaboratorName)).delete
/**
* Remove all collaborators from the repository.
*
* @param userName the user name of the repository owner
* @param repositoryName the repository name
*/
def removeCollaborators(userName: String, repositoryName: String)(implicit s: Session): Unit =
Collaborators.filter(_.byRepository(userName, repositoryName)).delete
/**
* Returns the list of collaborators name which is sorted with ascending order.
*
* @param userName the user name of the repository owner
* @param repositoryName the repository name
* @return the list of collaborators name
*/
def getCollaborators(userName: String, repositoryName: String)(implicit s: Session): List[String] =
Collaborators.filter(_.byRepository(userName, repositoryName)).sortBy(_.collaboratorName).map(_.collaboratorName).list
def hasWritePermission(owner: String, repository: String, loginAccount: Option[Account])(implicit s: Session): Boolean = {
loginAccount match {
case Some(a) if(a.isAdmin) => true
case Some(a) if(a.userName == owner) => true
case Some(a) if(getCollaborators(owner, repository).contains(a.userName)) => true
case _ => false
}
}
private def getForkedCount(userName: String, repositoryName: String)(implicit s: Session): Int =
Query(Repositories.filter { t =>
(t.originUserName === userName.bind) && (t.originRepositoryName === repositoryName.bind)
}.length).first
def getForkedRepositories(userName: String, repositoryName: String)(implicit s: Session): List[(String, String)] =
Repositories.filter { t =>
(t.originUserName === userName.bind) && (t.originRepositoryName === repositoryName.bind)
}
.sortBy(_.userName asc).map(t => t.userName -> t.repositoryName).list
}
object RepositoryService {
case class RepositoryInfo(owner: String, name: String, httpUrl: String, repository: Repository,
issueCount: Int, pullCount: Int, commitCount: Int, forkedCount: Int,
branchList: Seq[String], tags: Seq[util.JGitUtil.TagInfo], managers: Seq[String]){
lazy val host = """^https?://(.+?)(:\\d+)?/""".r.findFirstMatchIn(httpUrl).get.group(1)
def sshUrl(port: Int, userName: String) = s"ssh://${userName}@${host}:${port}/${owner}/${name}.git"
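    // e.g. (illustrative) with httpUrl == "https://example.com/owner/name.git":
    //   sshUrl(29418, "alice") == "ssh://alice@example.com:29418/owner/name.git"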
/**
* Creates instance with issue count and pull request count.
*/
def this(repo: JGitUtil.RepositoryInfo, model: Repository, issueCount: Int, pullCount: Int, forkedCount: Int, managers: Seq[String]) =
this(repo.owner, repo.name, repo.url, model, issueCount, pullCount, repo.commitCount, forkedCount, repo.branchList, repo.tags, managers)
/**
* Creates instance without issue count and pull request count.
*/
def this(repo: JGitUtil.RepositoryInfo, model: Repository, forkedCount: Int, managers: Seq[String]) =
this(repo.owner, repo.name, repo.url, model, 0, 0, repo.commitCount, forkedCount, repo.branchList, repo.tags, managers)
}
case class RepositoryTreeNode(owner: String, name: String, children: List[RepositoryTreeNode])
}
| tily/gitbucket2 | src/main/scala/service/RepositoryService.scala | Scala | apache-2.0 | 17,978 |
package io.github.andrebeat.kadfun
import com.github.nscala_time.time.Imports._
import java.net.InetAddress
import scala.collection.immutable.SortedSet
case class Node(nodeId: Id, address: InetAddress, port: Int) {
def distance(that: Node) = this.nodeId.distance(that.nodeId)
}
case class Contact(node: Node, timestamp: DateTime = DateTime.now)
object Contact {
implicit object LeastRecentlySeenOrdering extends Ordering[Contact] {
def compare(x: Contact, y: Contact) =
x.timestamp.compare(y.timestamp)
}
case class ClosestToNodeOrdering(node: Node) extends Ordering[Contact] {
def compare(x: Contact, y: Contact) =
implicitly[Ordering[Distance]].compare(node.distance(x.node), node.distance(y.node))
}
}
case class Bucket(capacity: Int = Bucket.CAPACITY, entries: SortedSet[Contact] = SortedSet[Contact]()) {
def size: Int = entries.size
def nodes: Set[Node] = entries.map(_.node)
def +(node: Node): Bucket =
entries.find(_.node == node) match {
case Some(old) => copy(entries = (entries - old + Contact(node)))
case _ if size < capacity => copy(entries = (entries + Contact(node)))
      case _ => this // TODO: ping the least-recently seen contact to decide what to do.
}
def -(node: Node): Bucket =
entries.find(_.node == node) match {
case Some(c) => copy(entries = (entries - c))
case _ => this
}
}
object Bucket {
val CAPACITY = 20
}
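// Minimal usage sketch (the id and address values are made up):
//   val node = Node(someId, InetAddress.getLoopbackAddress, 8000)
//   val b0 = Bucket() + node   // inserts a fresh Contact for the node
//   val b1 = b0 + node         // seen again: its Contact timestamp is refreshed
//   val b2 = b1 - node         // removes the node's Contact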
case class RoutingTable(self: Id, buckets: Vector[Bucket] = Vector(Bucket())) {
import Bits._
private[this] def bucket(id: Id): Bucket = {
val b = self.distance(id).bytes
def loop(bit: Int = 0): Int =
if (bit == Id.SIZE_BITS)
throw new IllegalArgumentException("A node must never put its own nodeId into a bucket as a contact")
else if (b.testBit(bit)) Id.SIZE_BITS - bit - 1
else loop(bit + 1)
buckets(loop())
}
}
| andrebeat/kadfun | src/main/scala/io/github/andrebeat/kadfun/routing.scala | Scala | mit | 1,889 |
package sttp.client3.examples
object PostFormSynchronous extends App {
import sttp.client3._
val signup = Some("yes")
val request = basicRequest
// send the body as form data (x-www-form-urlencoded)
.body(Map("name" -> "John", "surname" -> "doe"))
// use an optional parameter in the URI
.post(uri"https://httpbin.org/post?signup=$signup")
val backend = HttpURLConnectionBackend()
val response = request.send(backend)
println(response.body)
println(response.headers)
}
| softwaremill/sttp | examples/src/main/scala/sttp/client3/examples/PostFormSynchronous.scala | Scala | apache-2.0 | 504 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala
package runtime
final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
protected def num = scala.math.Numeric.ByteIsIntegral
protected def ord = scala.math.Ordering.Byte
override def doubleValue() = self.toDouble
override def floatValue() = self.toFloat
override def longValue() = self.toLong
override def intValue() = self.toInt
override def byteValue() = self
override def shortValue() = self.toShort
override def isValidByte = true
override def abs: Byte = math.abs(self).toByte
override def max(that: Byte): Byte = math.max(self, that).toByte
override def min(that: Byte): Byte = math.min(self, that).toByte
override def signum: Int = math.signum(self.toInt)
}
| felixmulder/scala | src/library/scala/runtime/RichByte.scala | Scala | bsd-3-clause | 1,301 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar.compression
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.columnar.{ColumnBuilder, NativeColumnBuilder}
import org.apache.spark.sql.types.AtomicType
import org.apache.spark.unsafe.Platform
/**
 * A stackable trait that builds an optionally compressed byte buffer for a column. The memory
 * layout of the final byte buffer is:
* {{{
* .----------------------- Null count N (4 bytes)
* | .------------------- Null positions (4 x N bytes, empty if null count is zero)
* | | .------------- Compression scheme ID (4 bytes)
* | | | .--------- Compressed non-null elements
* V V V V
* +---+-----+---+---------+
* | | ... | | ... ... |
* +---+-----+---+---------+
* \\-------/ \\-----------/
* header body
* }}}
*/
private[columnar] trait CompressibleColumnBuilder[T <: AtomicType]
extends ColumnBuilder with Logging {
this: NativeColumnBuilder[T] with WithCompressionSchemes =>
var compressionEncoders: Seq[Encoder[T]] = _
abstract override def initialize(
initialSize: Int,
columnName: String,
useCompression: Boolean): Unit = {
compressionEncoders =
if (useCompression) {
schemes.filter(_.supports(columnType)).map(_.encoder[T](columnType))
} else {
Seq(PassThrough.encoder(columnType))
}
super.initialize(initialSize, columnName, useCompression)
}
// The various compression schemes, while saving memory use, cause all of the data within
// the row to become unaligned, thus causing crashes. Until a way of fixing the compression
// is found to also allow aligned accesses this must be disabled for SPARC.
protected def isWorthCompressing(encoder: Encoder[T]) = {
CompressibleColumnBuilder.unaligned && encoder.compressionRatio < 0.8
}
private def gatherCompressibilityStats(row: InternalRow, ordinal: Int): Unit = {
compressionEncoders.foreach(_.gatherCompressibilityStats(row, ordinal))
}
abstract override def appendFrom(row: InternalRow, ordinal: Int): Unit = {
super.appendFrom(row, ordinal)
if (!row.isNullAt(ordinal)) {
gatherCompressibilityStats(row, ordinal)
}
}
override def build(): ByteBuffer = {
val nonNullBuffer = buildNonNulls()
val encoder: Encoder[T] = {
val candidate = compressionEncoders.minBy(_.compressionRatio)
if (isWorthCompressing(candidate)) candidate else PassThrough.encoder(columnType)
}
// Header = null count + null positions
val headerSize = 4 + nulls.limit()
val compressedSize = if (encoder.compressedSize == 0) {
nonNullBuffer.remaining()
} else {
encoder.compressedSize
}
val compressedBuffer = ByteBuffer
// Reserves 4 bytes for compression scheme ID
.allocate(headerSize + 4 + compressedSize)
.order(ByteOrder.nativeOrder)
// Write the header
.putInt(nullCount)
.put(nulls)
logDebug(s"Compressor for [$columnName]: $encoder, ratio: ${encoder.compressionRatio}")
encoder.compress(nonNullBuffer, compressedBuffer)
}
}
private[columnar] object CompressibleColumnBuilder {
val unaligned = Platform.unaligned()
}
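// Hedged sketch (not part of Spark's API) of how a reader would walk the header
// documented above, assuming `buf` is a ByteBuffer produced by build():
//   val nullCount     = buf.getInt()                        // N
//   val nullPositions = Array.fill(nullCount)(buf.getInt()) // empty if N == 0
//   val schemeId      = buf.getInt()                        // compression scheme ID
//   // the remaining bytes are the compressed non-null elements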
| bravo-zhang/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala | Scala | apache-2.0 | 4,127 |
package im.actor
import sbt._
object Dependencies {
object V {
val actorCommons = "0.0.20"
val actorBotkit = "1.0.113"
val akka = "2.4.10"
val akkaHttpJson = "1.10.0"
val cats = "0.7.2"
val circe = "0.5.1"
val kamon = "0.5.2"
val slick = "3.1.1"
val slickPg = "0.14.3"
val scalatest = "2.2.4"
val shardakka = "0.1.24"
val scalapbSer = "0.1.14"
}
object Compile {
val actorConcurrent = "im.actor" %% "actor-concurrent" % V.actorCommons
val actorUtil = "im.actor" %% "actor-util" % V.actorCommons
val actorCatsSlick = "im.actor" %% "actor-cats-slick" % V.actorCommons
val actorStorageSlick = "im.actor" %% "actor-storage-slick" % V.actorCommons
val actorBotkit = "im.actor" % "actor-botkit" % V.actorBotkit
val shardakka = "im.actor" %% "shardakka" % V.shardakka
val scalapbSer = "im.actor" %% "akka-scalapb-serialization" % V.scalapbSer
val akkaActor = "com.typesafe.akka" %% "akka-actor" % V.akka
val akkaPersistence = "com.typesafe.akka" %% "akka-persistence" % V.akka
val akkaDdata = "com.typesafe.akka" %% "akka-distributed-data-experimental" % V.akka
val akkaClusterTools = "com.typesafe.akka" %% "akka-cluster-tools" % V.akka
val akkaClusterSharding = "com.typesafe.akka" %% "akka-cluster-sharding" % V.akka
val akkaStream = "com.typesafe.akka" %% "akka-stream" % V.akka
val akkaHttp = "com.typesafe.akka" %% "akka-http-experimental" % V.akka
val akkaHttpPlayJson = "de.heikoseeberger" %% "akka-http-play-json" % V.akkaHttpJson
val akkaHttpCirce = "de.heikoseeberger" %% "akka-http-circe" % V.akkaHttpJson
val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % V.akka
val sprayClient = "io.spray" %% "spray-client" % "1.3.3"
val sprayWebsocket = "com.wandoulabs.akka" %% "spray-websocket" % "0.1.4"
val akkaPersistenceJdbc = "com.github.dnvriend" %% "akka-persistence-jdbc" % "2.3.3"
val apacheEmail = "org.apache.commons" % "commons-email" % "1.4"
val betterFiles = "com.github.pathikrit" %% "better-files" % "2.13.0"
val caffeine = "com.github.ben-manes.caffeine" % "caffeine" % "2.2.7"
val cats = "org.typelevel" %% "cats" % V.cats
val circeCore = "io.circe" %% "circe-core" % V.circe
val circeGeneric = "io.circe" %% "circe-generic" % V.circe
val circeParse = "io.circe" %% "circe-parser" % V.circe
val configs = "com.github.kxbmap" %% "configs" % "0.3.0"
val dispatch = "net.databinder.dispatch" %% "dispatch-core" % "0.11.3"
val javaCompat = "org.scala-lang.modules" %% "scala-java8-compat" % "0.7.0"
val playJson = "com.typesafe.play" %% "play-json" % "2.5.6"
val upickle = "com.lihaoyi" %% "upickle" % "0.3.6"
val postgresJdbc = "org.postgresql" % "postgresql" % "9.4.1208" exclude("org.slf4j", "slf4j-simple")
val slick = "com.typesafe.slick" %% "slick" % "3.1.1.2" //V.slick FIXME: remove after slick/slick#1274 released
val slickHikaricp = "com.typesafe.slick" %% "slick-hikaricp" % "3.1.1.2" exclude("com.zaxxer", "HikariCP-java6") //V.slick FIXME: remove after slick/slick#1274 released
val slickJoda = "com.github.tototoshi" %% "slick-joda-mapper" % "2.0.0"
val slickPg = "com.github.tminglei" %% "slick-pg" % V.slickPg
val slickPgDate2 = "com.github.tminglei" %% "slick-pg_date2" % V.slickPg
val slickTestkit = "com.typesafe.slick" %% "slick-testkit" % V.slick
val flywayCore = "org.flywaydb" % "flyway-core" % "3.1"
val hikariCP = "com.zaxxer" % "HikariCP" % "2.4.6"
val amazonaws = "com.amazonaws" % "aws-java-sdk-s3" % "1.11.32"
val awsWrap = "com.github.dwhjames" %% "aws-wrap" % "0.8.0"
val bcprov = "org.bouncycastle" % "bcprov-jdk15on" % "1.50"
val kamonCore = "io.kamon" %% "kamon-core" % V.kamon
val kamonDatadog = "io.kamon" %% "kamon-datadog" % V.kamon
val libPhoneNumber = "com.googlecode.libphonenumber" % "libphonenumber" % "7.0.+"
val icu4j = "com.ibm.icu" % "icu4j" % "56.1"
val scodecBits = "org.scodec" %% "scodec-bits" % "1.0.9"
val scodecCore = "org.scodec" %% "scodec-core" % "1.8.1"
val scopt = "com.github.scopt" %% "scopt" % "3.3.0"
val shapeless = "com.chuusai" %% "shapeless" % "2.2.4"
val scrImageCore = "com.sksamuel.scrimage" %% "scrimage-core" % "2.1.0"
val tyrex = "tyrex" % "tyrex" % "1.0.1"
val pushy = "com.relayrides" % "pushy" % "0.6.1"
val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.1.2"
val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.1.0"
val jodaTime = "joda-time" % "joda-time" % "2.7"
val jodaConvert = "org.joda" % "joda-convert" % "1.7"
val apacheCommonsCodec = "commons-codec" % "commons-codec" % "1.10"
val apacheCommonsIo = "commons-io" % "commons-io" % "2.4"
val apacheCommonsValidator = "commons-validator" % "commons-validator" % "1.4.1"
val guava = "com.google.guava" % "guava" % "19.0"
val alpn = "org.eclipse.jetty.alpn" % "alpn-api" % "1.1.2.v20150522" % "runtime"
val tcnative = "io.netty" % "netty-tcnative" % "1.1.33.Fork15" classifier "linux-x86_64-fedora"
val tcnativeboringssl = "io.netty" % "netty-tcnative-boringssl-static" % "1.1.33.Fork15"
val silencer = "com.github.ghik" % "silencer-lib" % "0.4"
}
object Testing {
val akkaTestkit = "com.typesafe.akka" %% "akka-testkit" % V.akka
val akkaMultiNodeTestkit = "com.typesafe.akka" %% "akka-multi-node-testkit" % V.akka
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.12.5"
val scalatest = "org.scalatest" %% "scalatest" % V.scalatest
val jfairy = "io.codearte.jfairy" % "jfairy" % "0.3.1"
}
import Compile._
import Testing._
val shared = Seq(
alpn,
tcnative,
tcnativeboringssl,
configs,
actorUtil,
javaCompat,
logbackClassic,
scalaLogging,
tyrex,
kamonCore,
kamonDatadog,
silencer
)
val root = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream
)
val activation = shared ++ Seq(akkaActor, playJson, sprayClient)
val bots = shared ++ Seq(actorBotkit, upickle, shardakka)
val botkit = Seq(actorConcurrent, akkaActor, akkaHttp, akkaSlf4j, javaCompat, sprayWebsocket, upickle)
val botShared = Seq(upickle, javaCompat)
val cli = Seq(akkaClusterTools, scopt)
val core = shared ++ Seq(
actorConcurrent,
akkaActor,
akkaClusterTools,
akkaClusterSharding,
akkaDdata,
caffeine,
pushy,
jodaTime,
postgresJdbc,
shardakka,
scrImageCore,
sprayClient
)
val enrich = shared ++ Seq(akkaActor, akkaHttp)
val rpcApi = shared ++ Seq(
akkaSlf4j, akkaActor, bcprov, apacheCommonsIo, apacheCommonsValidator, shapeless, akkaHttpPlayJson
)
val httpApi = shared ++ Seq(akkaActor, akkaHttp, akkaHttpPlayJson, akkaHttpCirce, circeCore, circeGeneric, circeParse, jodaTime, playJson)
val email = shared ++ Seq(akkaActor, apacheEmail)
val oauth = shared ++ Seq(akkaActor, akkaHttp, playJson)
val session = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream, scodecCore
)
val sessionMessages = Seq(akkaActor)
val persist = shared ++ Seq(akkaActor, akkaStream, actorCatsSlick, actorStorageSlick, apacheCommonsCodec, guava, postgresJdbc, slick, slickHikaricp, slickJoda, slickPg, slickPgDate2, slickTestkit, flywayCore, hikariCP, jodaTime, jodaConvert)
val presences = shared :+ akkaClusterSharding
val sms = shared ++ Seq(akkaActor, akkaHttp, dispatch)
val codecs = shared ++ Seq(scodecBits, scodecCore)
val models = shared ++ Seq(scodecBits, scodecCore, jodaTime, jodaConvert, slickPg)
val fileAdapter = shared ++ Seq(amazonaws, apacheCommonsCodec, apacheCommonsIo, awsWrap, betterFiles)
val frontend = shared ++ Seq(
akkaSlf4j, akkaActor, akkaStream,
guava,
scodecBits, scodecCore
)
val sdk = Seq.empty
val runtime = shared ++ Seq(akkaActor, actorConcurrent, akkaHttp, akkaSlf4j, akkaStream, akkaPersistenceJdbc, apacheCommonsCodec, caffeine, cats, jodaConvert, jodaTime, icu4j, libPhoneNumber, scalapbSer, akkaTestkit % "test", scalatest % "test")
val tests = shared ++ Seq(akkaClusterSharding, amazonaws, jfairy, scalacheck, scalatest, slickTestkit, akkaTestkit, akkaMultiNodeTestkit)
}
| EaglesoftZJ/actor-platform | actor-server/project/Dependencies.scala | Scala | agpl-3.0 | 11,373 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.flink.table.planner.calcite.{FlinkContext, FlinkTypeFactory}
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalJoin
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamExecWindowJoin
import org.apache.flink.table.planner.plan.utils.{FlinkRelOptUtil, WindowJoinUtil}
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import java.util
import scala.collection.JavaConversions._
/**
* Rule that converts non-SEMI/ANTI [[FlinkLogicalJoin]] with window bounds in join condition
* to [[StreamExecWindowJoin]].
*/
class StreamExecWindowJoinRule
extends ConverterRule(
classOf[FlinkLogicalJoin],
FlinkConventions.LOGICAL,
FlinkConventions.STREAM_PHYSICAL,
"StreamExecWindowJoinRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val join: FlinkLogicalJoin = call.rel(0)
val joinRowType = join.getRowType
val joinInfo = join.analyzeCondition()
// joins require an equi-condition or a conjunctive predicate with at least one equi-condition
// TODO support SEMI/ANTI join
if (!join.getJoinType.projectsRight || joinInfo.pairs().isEmpty) {
return false
}
val tableConfig = FlinkRelOptUtil.getTableConfigFromContext(join)
val (windowBounds, _) = WindowJoinUtil.extractWindowBoundsFromPredicate(
join.getCondition,
join.getLeft.getRowType.getFieldCount,
joinRowType,
join.getCluster.getRexBuilder,
tableConfig)
if (windowBounds.isDefined) {
if (windowBounds.get.isEventTime) {
true
} else {
// Check that no event-time attributes are in the input because the processing time window
// join does not correctly hold back watermarks.
// We rely on projection pushdown to remove unused attributes before the join.
!joinRowType.getFieldList.exists(f => FlinkTypeFactory.isRowtimeIndicatorType(f.getType))
}
} else {
// the given join does not have valid window bounds. We cannot translate it.
false
}
}
override def convert(rel: RelNode): RelNode = {
val join: FlinkLogicalJoin = rel.asInstanceOf[FlinkLogicalJoin]
val joinRowType = join.getRowType
val left = join.getLeft
val right = join.getRight
def toHashTraitByColumns(
columns: util.Collection[_ <: Number],
inputTraitSet: RelTraitSet): RelTraitSet = {
val distribution = if (columns.size() == 0) {
FlinkRelDistribution.SINGLETON
} else {
FlinkRelDistribution.hash(columns)
}
inputTraitSet
.replace(FlinkConventions.STREAM_PHYSICAL)
.replace(distribution)
}
val joinInfo = join.analyzeCondition
val (leftRequiredTrait, rightRequiredTrait) = (
toHashTraitByColumns(joinInfo.leftKeys, left.getTraitSet),
toHashTraitByColumns(joinInfo.rightKeys, right.getTraitSet))
val newLeft = RelOptRule.convert(left, leftRequiredTrait)
val newRight = RelOptRule.convert(right, rightRequiredTrait)
val providedTraitSet = join.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
val tableConfig = rel.getCluster.getPlanner.getContext.asInstanceOf[FlinkContext].getTableConfig
val (windowBounds, remainCondition) = WindowJoinUtil.extractWindowBoundsFromPredicate(
join.getCondition,
left.getRowType.getFieldCount,
joinRowType,
join.getCluster.getRexBuilder,
tableConfig)
new StreamExecWindowJoin(
rel.getCluster,
providedTraitSet,
newLeft,
newRight,
join.getCondition,
join.getJoinType,
joinRowType,
windowBounds.get.isEventTime,
windowBounds.get.leftLowerBound,
windowBounds.get.leftUpperBound,
windowBounds.get.leftTimeIdx,
windowBounds.get.rightTimeIdx,
remainCondition)
}
}
object StreamExecWindowJoinRule {
val INSTANCE: RelOptRule = new StreamExecWindowJoinRule
}
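// Hypothetical registration sketch, shown only to illustrate how the converter
// rule is consumed (the planner assembles its actual rule sets elsewhere):
//   val physicalRules: Seq[RelOptRule] = Seq(StreamExecWindowJoinRule.INSTANCE)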
| fhueske/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamExecWindowJoinRule.scala | Scala | apache-2.0 | 5,035 |
package com.eharmony.aloha.models
import com.eharmony.aloha.util.Logging
import scala.language.existentials
import java.{lang => jl}
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner
import com.eharmony.aloha.models.TypeCoercionTest.{matrixAndTypes, Precision}
import com.eharmony.aloha.reflect.{RefInfo, RefInfoOps}
/**
 * Test that the proper coercions exist and that they work. There are a ton of coercions.
 * At the time of this writing: 557 (3 * 219) coercions. Here we test all of the coercions
 * from types not lifted to option. We also test that the option-based coercions exist. We
* don't check the correctness of the option coercions because they are just a mapping of the
* non-lifted coercions.
*
 * Notice that a lot of the string tests are unstable and throw. This may mean that we should
 * remove the from-string based coercions.
*
* @author R M Deak
*/
@RunWith(classOf[BlockJUnit4ClassRunner])
class TypeCoercionTest extends Logging {
val (matrix, types) = matrixAndTypes()
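  // Shape of the API under test: TypeCoercion[A, B] yields an Option[A => B], e.g.
  //   TypeCoercion[Double, Int].get.apply(7.5)      == 7      (truncation toward zero)
  //   TypeCoercion[Boolean, String].get.apply(true) == "true"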
@Test def test_Bo_Bo_0(): Unit = assertEquals(true, TypeCoercion[Boolean, Boolean].get.apply(true))
@Test def test_Bo_Bo_1(): Unit = assertEquals(false, TypeCoercion[Boolean, Boolean].get.apply(false))
@Test def test_Bo_JBo_0(): Unit = assertEquals(true, TypeCoercion[Boolean, jl.Boolean].get.apply(true))
@Test def test_Bo_JBo_1(): Unit = assertEquals(false, TypeCoercion[Boolean, jl.Boolean].get.apply(false))
@Test def test_Bo_St_0(): Unit = assertEquals("true", TypeCoercion[Boolean, String].get.apply(true))
@Test def test_Bo_St_1(): Unit = assertEquals("false", TypeCoercion[Boolean, String].get.apply(false))
@Test def test_By_By_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, Byte].get.apply(2.toByte))
@Test def test_By_By_1(): Unit = assertEquals((-3).toByte, TypeCoercion[Byte, Byte].get.apply((-3).toByte))
@Test def test_By_C_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, Char].get.apply(2.toByte))
@Test def test_By_C_1(): Unit = assertEquals((-3).toChar, TypeCoercion[Byte, Char].get.apply((-3).toByte))
@Test def test_By_D_0(): Unit = assertEquals(2d, TypeCoercion[Byte, Double].get.apply(2.toByte), 0)
@Test def test_By_D_1(): Unit = assertEquals(-3d, TypeCoercion[Byte, Double].get.apply((-3).toByte), 0)
@Test def test_By_F_0(): Unit = assertEquals(2f, TypeCoercion[Byte, Float].get.apply(2.toByte), 0)
@Test def test_By_F_1(): Unit = assertEquals(-3f, TypeCoercion[Byte, Float].get.apply((-3).toByte), 0)
@Test def test_By_I_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, Int].get.apply(2.toByte))
@Test def test_By_I_1(): Unit = assertEquals((-3).toByte, TypeCoercion[Byte, Int].get.apply((-3).toByte))
@Test def test_By_JBy_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, jl.Byte].get.apply(2.toByte))
@Test def test_By_JBy_1(): Unit = assertEquals((-3).toByte, TypeCoercion[Byte, jl.Byte].get.apply((-3).toByte))
@Test def test_By_JC_0(): Unit = assertEquals(jChar(2), TypeCoercion[Byte, jl.Character].get.apply(2.toByte))
@Test def test_By_JC_1(): Unit = assertEquals(jChar(65533.toChar), TypeCoercion[Byte, jl.Character].get.apply((-3).toByte)) //underflow
@Test def test_By_JD_0(): Unit = assertEquals(2d, TypeCoercion[Byte, jl.Double].get.apply(2.toByte), 0)
@Test def test_By_JD_1(): Unit = assertEquals(-3d, TypeCoercion[Byte, jl.Double].get.apply((-3).toByte), 0)
@Test def test_By_JF_0(): Unit = assertEquals(2f, TypeCoercion[Byte, jl.Float].get.apply(2.toByte).floatValue(), 0)
@Test def test_By_JF_1(): Unit = assertEquals(-3f, TypeCoercion[Byte, jl.Float].get.apply((-3).toByte).floatValue(), 0)
@Test def test_By_JI_0(): Unit = assertEquals(jInt(2), TypeCoercion[Byte, jl.Integer].get.apply(2.toByte))
@Test def test_By_JI_1(): Unit = assertEquals(jInt(-3), TypeCoercion[Byte, jl.Integer].get.apply((-3).toByte))
@Test def test_By_JL_0(): Unit = assertEquals(jLong(2), TypeCoercion[Byte, jl.Long].get.apply(2.toByte))
@Test def test_By_JL_1(): Unit = assertEquals(jLong(-3), TypeCoercion[Byte, jl.Long].get.apply((-3).toByte))
@Test def test_By_JSh_0(): Unit = assertEquals(jShort(2.toShort), TypeCoercion[Byte, jl.Short].get.apply(2.toByte))
@Test def test_By_JSh_1(): Unit = assertEquals(jShort(-3.toShort), TypeCoercion[Byte, jl.Short].get.apply((-3).toByte))
@Test def test_By_L_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, Long].get.apply(2.toByte))
@Test def test_By_L_1(): Unit = assertEquals((-3).toByte, TypeCoercion[Byte, Long].get.apply((-3).toByte))
@Test def test_By_Sh_0(): Unit = assertEquals(2.toByte, TypeCoercion[Byte, Short].get.apply(2.toByte))
@Test def test_By_Sh_1(): Unit = assertEquals((-3).toByte, TypeCoercion[Byte, Short].get.apply((-3).toByte))
@Test def test_By_St_0(): Unit = assertEquals("2", TypeCoercion[Byte, String].get.apply(2.toByte))
@Test def test_By_St_1(): Unit = assertEquals("-3", TypeCoercion[Byte, String].get.apply((-3).toByte))
@Test def test_C_By_0(): Unit = assertEquals('2'.toByte, TypeCoercion[Char, Byte].get.apply('2'))
@Test def test_C_By_1(): Unit = assertEquals('3'.toByte, TypeCoercion[Char, Byte].get.apply('3'))
@Test def test_C_C_0(): Unit = assertEquals('2', TypeCoercion[Char, Char].get.apply('2'))
@Test def test_C_C_1(): Unit = assertEquals('3', TypeCoercion[Char, Char].get.apply('3'))
@Test def test_C_D_0(): Unit = assertEquals('2'.toDouble, TypeCoercion[Char, Double].get.apply('2'), 0)
@Test def test_C_D_1(): Unit = assertEquals('3'.toDouble, TypeCoercion[Char, Double].get.apply('3'), 0)
@Test def test_C_F_0(): Unit = assertEquals('2'.toFloat, TypeCoercion[Char, Float].get.apply('2'), 0)
@Test def test_C_F_1(): Unit = assertEquals('3'.toFloat, TypeCoercion[Char, Float].get.apply('3'), 0)
@Test def test_C_I_0(): Unit = assertEquals('2'.toInt, TypeCoercion[Char, Int].get.apply('2'))
@Test def test_C_I_1(): Unit = assertEquals('3'.toInt, TypeCoercion[Char, Int].get.apply('3'))
@Test def test_C_JBy_0(): Unit = assertEquals(jByte('2'.toByte), TypeCoercion[Char, jl.Byte].get.apply('2'))
@Test def test_C_JBy_1(): Unit = assertEquals(jByte('3'.toByte), TypeCoercion[Char, jl.Byte].get.apply('3'))
@Test def test_C_JC_0(): Unit = assertEquals(jChar('2'), TypeCoercion[Char, jl.Character].get.apply('2'))
@Test def test_C_JC_1(): Unit = assertEquals(jChar('3'), TypeCoercion[Char, jl.Character].get.apply('3'))
@Test def test_C_JD_0(): Unit = assertEquals(jDouble('2'.toDouble).doubleValue, TypeCoercion[Char, jl.Double].get.apply('2').doubleValue, 0)
@Test def test_C_JD_1(): Unit = assertEquals(jDouble('3'.toDouble).doubleValue, TypeCoercion[Char, jl.Double].get.apply('3').doubleValue, 0)
@Test def test_C_JF_0(): Unit = assertEquals(jFloat('2'.toFloat).floatValue, TypeCoercion[Char, jl.Float].get.apply('2').floatValue, 0)
@Test def test_C_JF_1(): Unit = assertEquals(jFloat('3'.toFloat).floatValue, TypeCoercion[Char, jl.Float].get.apply('3').floatValue, 0)
@Test def test_C_JI_0(): Unit = assertEquals(jInt('2'.toInt), TypeCoercion[Char, jl.Integer].get.apply('2'))
@Test def test_C_JI_1(): Unit = assertEquals(jInt('3'.toInt), TypeCoercion[Char, jl.Integer].get.apply('3'))
@Test def test_C_JL_0(): Unit = assertEquals(jLong('2'.toLong), TypeCoercion[Char, jl.Long].get.apply('2'))
@Test def test_C_JL_1(): Unit = assertEquals(jLong('3'.toLong), TypeCoercion[Char, jl.Long].get.apply('3'))
@Test def test_C_JSh_0(): Unit = assertEquals(jShort('2'.toShort), TypeCoercion[Char, jl.Short].get.apply('2'))
@Test def test_C_JSh_1(): Unit = assertEquals(jShort('3'.toShort), TypeCoercion[Char, jl.Short].get.apply('3'))
@Test def test_C_L_0(): Unit = assertEquals('2'.toLong, TypeCoercion[Char, Long].get.apply('2'))
@Test def test_C_L_1(): Unit = assertEquals('3'.toLong, TypeCoercion[Char, Long].get.apply('3'))
@Test def test_C_Sh_0(): Unit = assertEquals('2'.toShort, TypeCoercion[Char, Short].get.apply('2'))
@Test def test_C_Sh_1(): Unit = assertEquals('3'.toShort, TypeCoercion[Char, Short].get.apply('3'))
@Test def test_C_St_0(): Unit = assertEquals("2", TypeCoercion[Char, String].get.apply('2'))
@Test def test_C_St_1(): Unit = assertEquals("3", TypeCoercion[Char, String].get.apply('3'))
@Test def test_D_By_0(): Unit = assertEquals(7.toByte, TypeCoercion[Double, Byte].get.apply(7.5))
@Test def test_D_By_1(): Unit = assertEquals(-8.toByte, TypeCoercion[Double, Byte].get.apply(-8.5))
@Test def test_D_C_0(): Unit = assertEquals(7.toChar, TypeCoercion[Double, Char].get.apply(7.5))
@Test def test_D_C_1(): Unit = assertEquals(-8.toChar, TypeCoercion[Double, Char].get.apply(-8.5))
@Test def test_D_D_0(): Unit = assertEquals(7.5, TypeCoercion[Double, Double].get.apply(7.5), 0)
@Test def test_D_D_1(): Unit = assertEquals(-8.5, TypeCoercion[Double, Double].get.apply(-8.5), 0)
@Test def test_D_F_0(): Unit = assertEquals(7.5f, TypeCoercion[Double, Float].get.apply(7.5), 0)
@Test def test_D_F_1(): Unit = assertEquals(-8.5f, TypeCoercion[Double, Float].get.apply(-8.5), 0)
@Test def test_D_I_0(): Unit = assertEquals(7, TypeCoercion[Double, Int].get.apply(7.5))
@Test def test_D_I_1(): Unit = assertEquals(-8, TypeCoercion[Double, Int].get.apply(-8.5))
@Test def test_D_JBy_0(): Unit = assertEquals(jByte(7.toByte), TypeCoercion[Double, jl.Byte].get.apply(7.5))
@Test def test_D_JBy_1(): Unit = assertEquals(jByte(-8.toByte), TypeCoercion[Double, jl.Byte].get.apply(-8.5))
@Test def test_D_JC_0(): Unit = assertEquals(jChar(7.toChar), TypeCoercion[Double, jl.Character].get.apply(7.5))
@Test def test_D_JC_1(): Unit = assertEquals(jChar(-8.toChar), TypeCoercion[Double, jl.Character].get.apply(-8.5))
@Test def test_D_JD_0(): Unit = assertEquals(jDouble(7.5).doubleValue, TypeCoercion[Double, jl.Double].get.apply(7.5).doubleValue, 0)
@Test def test_D_JD_1(): Unit = assertEquals(jDouble(-8.5).doubleValue, TypeCoercion[Double, jl.Double].get.apply(-8.5).doubleValue, 0)
@Test def test_D_JF_0(): Unit = assertEquals(jFloat(7.5f).floatValue, TypeCoercion[Double, jl.Float].get.apply(7.5).floatValue, 0)
@Test def test_D_JF_1(): Unit = assertEquals(jFloat(-8.5f).floatValue, TypeCoercion[Double, jl.Float].get.apply(-8.5).floatValue, 0)
@Test def test_D_JI_0(): Unit = assertEquals(jInt(7), TypeCoercion[Double, jl.Integer].get.apply(7.5))
@Test def test_D_JI_1(): Unit = assertEquals(jInt(-8), TypeCoercion[Double, jl.Integer].get.apply(-8.5))
@Test def test_D_JL_0(): Unit = assertEquals(jLong(7L), TypeCoercion[Double, jl.Long].get.apply(7.5))
@Test def test_D_JL_1(): Unit = assertEquals(jLong(-8L), TypeCoercion[Double, jl.Long].get.apply(-8.5))
@Test def test_D_JSh_0(): Unit = assertEquals(jShort(7.toShort), TypeCoercion[Double, jl.Short].get.apply(7.5))
@Test def test_D_JSh_1(): Unit = assertEquals(jShort(-8.toShort), TypeCoercion[Double, jl.Short].get.apply(-8.5))
@Test def test_D_L_0(): Unit = assertEquals(7L, TypeCoercion[Double, Long].get.apply(7.5))
@Test def test_D_L_1(): Unit = assertEquals(-8L, TypeCoercion[Double, Long].get.apply(-8.5))
@Test def test_D_Sh_0(): Unit = assertEquals(7.toShort, TypeCoercion[Double, Short].get.apply(7.5))
@Test def test_D_Sh_1(): Unit = assertEquals(-8.toShort, TypeCoercion[Double, Short].get.apply(-8.5))
@Test def test_D_St_0(): Unit = assertEquals("7.5", TypeCoercion[Double, String].get.apply(7.5))
@Test def test_D_St_1(): Unit = assertEquals("-8.5", TypeCoercion[Double, String].get.apply(-8.5))
@Test def test_F_By_0(): Unit = assertEquals(6.toByte, TypeCoercion[Float, Byte].get.apply(6.499f))
@Test def test_F_By_1(): Unit = assertEquals(-7.toByte, TypeCoercion[Float, Byte].get.apply(-7.499f))
@Test def test_F_C_0(): Unit = assertEquals(6.toChar, TypeCoercion[Float, Char].get.apply(6.499f))
@Test def test_F_C_1(): Unit = assertEquals(-7.toChar, TypeCoercion[Float, Char].get.apply(-7.499f))
@Test def test_F_D_0(): Unit = assertEquals(6.499, TypeCoercion[Float, Double].get.apply(6.499f), Precision)
@Test def test_F_D_1(): Unit = assertEquals(-7.499, TypeCoercion[Float, Double].get.apply(-7.499f), Precision)
@Test def test_F_F_0(): Unit = assertEquals(6.499f, TypeCoercion[Float, Float].get.apply(6.499f), 0)
@Test def test_F_F_1(): Unit = assertEquals(-7.499f, TypeCoercion[Float, Float].get.apply(-7.499f), 0)
@Test def test_F_I_0(): Unit = assertEquals(6, TypeCoercion[Float, Int].get.apply(6.499f))
@Test def test_F_I_1(): Unit = assertEquals(-7, TypeCoercion[Float, Int].get.apply(-7.499f))
@Test def test_F_JBy_0(): Unit = assertEquals(jByte(6.toByte), TypeCoercion[Float, jl.Byte].get.apply(6.499f))
@Test def test_F_JBy_1(): Unit = assertEquals(jByte(-7.toByte), TypeCoercion[Float, jl.Byte].get.apply(-7.499f))
@Test def test_F_JC_0(): Unit = assertEquals(jChar(6.toChar), TypeCoercion[Float, jl.Character].get.apply(6.499f))
@Test def test_F_JC_1(): Unit = assertEquals(jChar(65529.toChar), TypeCoercion[Float, jl.Character].get.apply(-7.499f)) // underflow: -7 wraps to 65529 because Char is unsigned
@Test def test_F_JD_0(): Unit = assertEquals(jDouble(6.499).doubleValue, TypeCoercion[Float, jl.Double].get.apply(6.499f).doubleValue, Precision)
@Test def test_F_JD_1(): Unit = assertEquals(jDouble(-7.499).doubleValue, TypeCoercion[Float, jl.Double].get.apply(-7.499f).doubleValue, Precision)
@Test def test_F_JF_0(): Unit = assertEquals(jFloat(6.499f).floatValue, TypeCoercion[Float, jl.Float].get.apply(6.499f).floatValue, 0)
@Test def test_F_JF_1(): Unit = assertEquals(jFloat(-7.499f).floatValue, TypeCoercion[Float, jl.Float].get.apply(-7.499f).floatValue, 0)
@Test def test_F_JI_0(): Unit = assertEquals(jInt(6), TypeCoercion[Float, jl.Integer].get.apply(6.499f))
@Test def test_F_JI_1(): Unit = assertEquals(jInt(-7), TypeCoercion[Float, jl.Integer].get.apply(-7.499f))
@Test def test_F_JL_0(): Unit = assertEquals(jLong(6L), TypeCoercion[Float, jl.Long].get.apply(6.499f))
@Test def test_F_JL_1(): Unit = assertEquals(jLong(-7L), TypeCoercion[Float, jl.Long].get.apply(-7.499f))
@Test def test_F_JSh_0(): Unit = assertEquals(jShort(6.toShort), TypeCoercion[Float, jl.Short].get.apply(6.499f))
@Test def test_F_JSh_1(): Unit = assertEquals(jShort(-7.toShort), TypeCoercion[Float, jl.Short].get.apply(-7.499f))
@Test def test_F_L_0(): Unit = assertEquals(6.toLong, TypeCoercion[Float, Long].get.apply(6.499f))
@Test def test_F_L_1(): Unit = assertEquals(-7.toLong, TypeCoercion[Float, Long].get.apply(-7.499f))
@Test def test_F_Sh_0(): Unit = assertEquals(6.toShort, TypeCoercion[Float, Short].get.apply(6.499f))
@Test def test_F_Sh_1(): Unit = assertEquals(-7.toShort, TypeCoercion[Float, Short].get.apply(-7.499f))
@Test def test_F_St_0(): Unit = assertEquals("6.499", TypeCoercion[Float, String].get.apply(6.499f))
@Test def test_F_St_1(): Unit = assertEquals("-7.499", TypeCoercion[Float, String].get.apply(-7.499f))
@Test def test_I_By_0(): Unit = assertEquals(4.toByte, TypeCoercion[Int, Byte].get.apply(4))
@Test def test_I_By_1(): Unit = assertEquals(-5.toByte, TypeCoercion[Int, Byte].get.apply(-5))
@Test def test_I_C_0(): Unit = assertEquals(4.toChar, TypeCoercion[Int, Char].get.apply(4))
@Test def test_I_C_1(): Unit = assertEquals(-5.toChar, TypeCoercion[Int, Char].get.apply(-5))
@Test def test_I_D_0(): Unit = assertEquals(4d, TypeCoercion[Int, Double].get.apply(4), 0)
@Test def test_I_D_1(): Unit = assertEquals(-5d, TypeCoercion[Int, Double].get.apply(-5), 0)
@Test def test_I_F_0(): Unit = assertEquals(4f, TypeCoercion[Int, Float].get.apply(4), 0)
@Test def test_I_F_1(): Unit = assertEquals(-5f, TypeCoercion[Int, Float].get.apply(-5), 0)
@Test def test_I_I_0(): Unit = assertEquals(4, TypeCoercion[Int, Int].get.apply(4))
@Test def test_I_I_1(): Unit = assertEquals(-5, TypeCoercion[Int, Int].get.apply(-5))
@Test def test_I_JBy_0(): Unit = assertEquals(jByte(4.toByte), TypeCoercion[Int, jl.Byte].get.apply(4))
@Test def test_I_JBy_1(): Unit = assertEquals(jByte(-5.toByte), TypeCoercion[Int, jl.Byte].get.apply(-5))
@Test def test_I_JC_0(): Unit = assertEquals(jChar(4.toChar), TypeCoercion[Int, jl.Character].get.apply(4))
@Test def test_I_JC_1(): Unit = assertEquals(jChar(65531.toChar), TypeCoercion[Int, jl.Character].get.apply(-5)) // underflow: -5 wraps to 65531
@Test def test_I_JD_0(): Unit = assertEquals(jDouble(4d).doubleValue, TypeCoercion[Int, jl.Double].get.apply(4).doubleValue, 0)
@Test def test_I_JD_1(): Unit = assertEquals(jDouble(-5d).doubleValue, TypeCoercion[Int, jl.Double].get.apply(-5).doubleValue, 0)
@Test def test_I_JF_0(): Unit = assertEquals(jFloat(4f).floatValue, TypeCoercion[Int, jl.Float].get.apply(4).floatValue, 0)
@Test def test_I_JF_1(): Unit = assertEquals(jFloat(-5f).floatValue, TypeCoercion[Int, jl.Float].get.apply(-5).floatValue, 0)
@Test def test_I_JI_0(): Unit = assertEquals(jInt(4), TypeCoercion[Int, jl.Integer].get.apply(4))
@Test def test_I_JI_1(): Unit = assertEquals(jInt(-5), TypeCoercion[Int, jl.Integer].get.apply(-5))
@Test def test_I_JL_0(): Unit = assertEquals(jLong(4L), TypeCoercion[Int, jl.Long].get.apply(4))
@Test def test_I_JL_1(): Unit = assertEquals(jLong(-5L), TypeCoercion[Int, jl.Long].get.apply(-5))
@Test def test_I_JSh_0(): Unit = assertEquals(jShort(4.toShort), TypeCoercion[Int, jl.Short].get.apply(4))
@Test def test_I_JSh_1(): Unit = assertEquals(jShort(-5.toShort), TypeCoercion[Int, jl.Short].get.apply(-5))
@Test def test_I_L_0(): Unit = assertEquals(4L, TypeCoercion[Int, Long].get.apply(4))
@Test def test_I_L_1(): Unit = assertEquals(-5L, TypeCoercion[Int, Long].get.apply(-5))
@Test def test_I_Sh_0(): Unit = assertEquals(4.toShort, TypeCoercion[Int, Short].get.apply(4))
@Test def test_I_Sh_1(): Unit = assertEquals(-5.toShort, TypeCoercion[Int, Short].get.apply(-5))
@Test def test_I_St_0(): Unit = assertEquals("4", TypeCoercion[Int, String].get.apply(4))
@Test def test_I_St_1(): Unit = assertEquals("-5", TypeCoercion[Int, String].get.apply(-5))
@Test def test_JBo_Bo_0(): Unit = assertEquals(false, TypeCoercion[jl.Boolean, Boolean].get.apply(jl.Boolean.FALSE))
@Test def test_JBo_Bo_1(): Unit = assertEquals(true, TypeCoercion[jl.Boolean, Boolean].get.apply(jl.Boolean.TRUE))
@Test def test_JBo_JBo_0(): Unit = assertEquals(jl.Boolean.FALSE, TypeCoercion[jl.Boolean, jl.Boolean].get.apply(jl.Boolean.FALSE))
@Test def test_JBo_JBo_1(): Unit = assertEquals(jl.Boolean.TRUE, TypeCoercion[jl.Boolean, jl.Boolean].get.apply(jl.Boolean.TRUE))
@Test def test_JBo_St_0(): Unit = assertEquals(jl.Boolean.FALSE.toString, TypeCoercion[jl.Boolean, String].get.apply(jl.Boolean.FALSE))
@Test def test_JBo_St_1(): Unit = assertEquals(jl.Boolean.TRUE.toString, TypeCoercion[jl.Boolean, String].get.apply(jl.Boolean.TRUE))
@Test def test_JBy_By_0(): Unit = assertEquals(8.toByte, TypeCoercion[jl.Byte, Byte].get.apply(jByte(8)))
@Test def test_JBy_By_1(): Unit = assertEquals(-9.toByte, TypeCoercion[jl.Byte, Byte].get.apply(jByte(-9)))
@Test def test_JBy_C_0(): Unit = assertEquals(8.toChar, TypeCoercion[jl.Byte, Char].get.apply(jByte(8)))
@Test def test_JBy_C_1(): Unit = assertEquals(-9.toChar, TypeCoercion[jl.Byte, Char].get.apply(jByte(-9)))
@Test def test_JBy_D_0(): Unit = assertEquals(8d, TypeCoercion[jl.Byte, Double].get.apply(jByte(8)), 0)
@Test def test_JBy_D_1(): Unit = assertEquals(-9d, TypeCoercion[jl.Byte, Double].get.apply(jByte(-9)), 0)
@Test def test_JBy_F_0(): Unit = assertEquals(8f, TypeCoercion[jl.Byte, Float].get.apply(jByte(8)), 0)
@Test def test_JBy_F_1(): Unit = assertEquals(-9f, TypeCoercion[jl.Byte, Float].get.apply(jByte(-9)), 0)
@Test def test_JBy_I_0(): Unit = assertEquals(8, TypeCoercion[jl.Byte, Int].get.apply(jByte(8)))
@Test def test_JBy_I_1(): Unit = assertEquals(-9, TypeCoercion[jl.Byte, Int].get.apply(jByte(-9)))
@Test def test_JBy_JBy_0(): Unit = assertEquals(jByte(8), TypeCoercion[jl.Byte, jl.Byte].get.apply(jByte(8)))
@Test def test_JBy_JBy_1(): Unit = assertEquals(jByte(-9), TypeCoercion[jl.Byte, jl.Byte].get.apply(jByte(-9)))
@Test def test_JBy_JC_0(): Unit = assertEquals(jChar(8), TypeCoercion[jl.Byte, jl.Character].get.apply(jByte(8)))
@Test def test_JBy_JC_1(): Unit = assertEquals(jChar(-9), TypeCoercion[jl.Byte, jl.Character].get.apply(jByte(-9)))
@Test def test_JBy_JD_0(): Unit = assertEquals(jDouble(8).doubleValue, TypeCoercion[jl.Byte, jl.Double].get.apply(jByte(8)).doubleValue, 0)
@Test def test_JBy_JD_1(): Unit = assertEquals(jDouble(-9).doubleValue, TypeCoercion[jl.Byte, jl.Double].get.apply(jByte(-9)).doubleValue, 0)
@Test def test_JBy_JF_0(): Unit = assertEquals(jFloat(8).floatValue, TypeCoercion[jl.Byte, jl.Float].get.apply(jByte(8)).floatValue, 0)
@Test def test_JBy_JF_1(): Unit = assertEquals(jFloat(-9).floatValue, TypeCoercion[jl.Byte, jl.Float].get.apply(jByte(-9)).floatValue, 0)
@Test def test_JBy_JI_0(): Unit = assertEquals(jInt(8), TypeCoercion[jl.Byte, jl.Integer].get.apply(jByte(8)))
@Test def test_JBy_JI_1(): Unit = assertEquals(jInt(-9), TypeCoercion[jl.Byte, jl.Integer].get.apply(jByte(-9)))
@Test def test_JBy_JL_0(): Unit = assertEquals(jLong(8), TypeCoercion[jl.Byte, jl.Long].get.apply(jByte(8)))
@Test def test_JBy_JL_1(): Unit = assertEquals(jLong(-9), TypeCoercion[jl.Byte, jl.Long].get.apply(jByte(-9)))
@Test def test_JBy_JSh_0(): Unit = assertEquals(jShort(8), TypeCoercion[jl.Byte, jl.Short].get.apply(jByte(8)))
@Test def test_JBy_JSh_1(): Unit = assertEquals(jShort(-9), TypeCoercion[jl.Byte, jl.Short].get.apply(jByte(-9)))
@Test def test_JBy_L_0(): Unit = assertEquals(8L, TypeCoercion[jl.Byte, Long].get.apply(jByte(8)))
@Test def test_JBy_L_1(): Unit = assertEquals(-9L, TypeCoercion[jl.Byte, Long].get.apply(jByte(-9)))
@Test def test_JBy_Sh_0(): Unit = assertEquals(8.toShort, TypeCoercion[jl.Byte, Short].get.apply(jByte(8)))
@Test def test_JBy_Sh_1(): Unit = assertEquals(-9.toShort, TypeCoercion[jl.Byte, Short].get.apply(jByte(-9)))
@Test def test_JBy_St_0(): Unit = assertEquals("8", TypeCoercion[jl.Byte, String].get.apply(jByte(8)))
@Test def test_JBy_St_1(): Unit = assertEquals("-9", TypeCoercion[jl.Byte, String].get.apply(jByte(-9)))
@Test def test_JC_C_0(): Unit = assertEquals('4', TypeCoercion[jl.Character, Char].get.apply(jChar('4')))
@Test def test_JC_C_1(): Unit = assertEquals('5', TypeCoercion[jl.Character, Char].get.apply(jChar('5')))
@Test def test_JC_JC_0(): Unit = assertEquals(jChar('4'), TypeCoercion[jl.Character, jl.Character].get.apply(jChar('4')))
@Test def test_JC_JC_1(): Unit = assertEquals(jChar('5'), TypeCoercion[jl.Character, jl.Character].get.apply(jChar('5')))
@Test def test_JC_St_0(): Unit = assertEquals("4", TypeCoercion[jl.Character, String].get.apply(jChar('4')))
@Test def test_JC_St_1(): Unit = assertEquals("5", TypeCoercion[jl.Character, String].get.apply(jChar('5')))
@Test def test_JD_By_0(): Unit = assertEquals(jByte(13), TypeCoercion[jl.Double, Byte].get.apply(jDouble(13.5)))
@Test def test_JD_By_1(): Unit = assertEquals(jByte(-14), TypeCoercion[jl.Double, Byte].get.apply(jDouble(-14.5)))
@Test def test_JD_C_0(): Unit = assertEquals(13.toChar, TypeCoercion[jl.Double, Char].get.apply(jDouble(13.5)))
@Test def test_JD_C_1(): Unit = assertEquals((-14).toChar, TypeCoercion[jl.Double, Char].get.apply(jDouble(-14.5)))
@Test def test_JD_D_0(): Unit = assertEquals(13.5, TypeCoercion[jl.Double, Double].get.apply(jDouble(13.5)), 0)
@Test def test_JD_D_1(): Unit = assertEquals(-14.5, TypeCoercion[jl.Double, Double].get.apply(jDouble(-14.5)), 0)
@Test def test_JD_F_0(): Unit = assertEquals(13.5f, TypeCoercion[jl.Double, Float].get.apply(jDouble(13.5)), 0)
@Test def test_JD_F_1(): Unit = assertEquals(-14.5f, TypeCoercion[jl.Double, Float].get.apply(jDouble(-14.5)), 0)
@Test def test_JD_I_0(): Unit = assertEquals(13, TypeCoercion[jl.Double, Int].get.apply(jDouble(13.5)))
@Test def test_JD_I_1(): Unit = assertEquals(-14, TypeCoercion[jl.Double, Int].get.apply(jDouble(-14.5)))
@Test def test_JD_JBy_0(): Unit = assertEquals(jByte(13), TypeCoercion[jl.Double, jl.Byte].get.apply(jDouble(13.5)))
@Test def test_JD_JBy_1(): Unit = assertEquals(jByte(-14), TypeCoercion[jl.Double, jl.Byte].get.apply(jDouble(-14.5)))
@Test def test_JD_JC_0(): Unit = assertEquals(jChar(13), TypeCoercion[jl.Double, jl.Character].get.apply(jDouble(13.5)))
@Test def test_JD_JC_1(): Unit = assertEquals(jChar(-14), TypeCoercion[jl.Double, jl.Character].get.apply(jDouble(-14.5)))
@Test def test_JD_JD_0(): Unit = assertEquals(jDouble(13.5), TypeCoercion[jl.Double, jl.Double].get.apply(jDouble(13.5)))
@Test def test_JD_JD_1(): Unit = assertEquals(jDouble(-14.5), TypeCoercion[jl.Double, jl.Double].get.apply(jDouble(-14.5)))
@Test def test_JD_JF_0(): Unit = assertEquals(jFloat(13.5f), TypeCoercion[jl.Double, jl.Float].get.apply(jDouble(13.5)))
@Test def test_JD_JF_1(): Unit = assertEquals(jFloat(-14.5f), TypeCoercion[jl.Double, jl.Float].get.apply(jDouble(-14.5)))
@Test def test_JD_JI_0(): Unit = assertEquals(jInt(13), TypeCoercion[jl.Double, jl.Integer].get.apply(jDouble(13.5)))
@Test def test_JD_JI_1(): Unit = assertEquals(jInt(-14), TypeCoercion[jl.Double, jl.Integer].get.apply(jDouble(-14.5)))
@Test def test_JD_JL_0(): Unit = assertEquals(jLong(13), TypeCoercion[jl.Double, jl.Long].get.apply(jDouble(13.5)))
@Test def test_JD_JL_1(): Unit = assertEquals(jLong(-14), TypeCoercion[jl.Double, jl.Long].get.apply(jDouble(-14.5)))
@Test def test_JD_JSh_0(): Unit = assertEquals(jShort(13), TypeCoercion[jl.Double, jl.Short].get.apply(jDouble(13.5)))
@Test def test_JD_JSh_1(): Unit = assertEquals(jShort(-14), TypeCoercion[jl.Double, jl.Short].get.apply(jDouble(-14.5)))
@Test def test_JD_L_0(): Unit = assertEquals(13L, TypeCoercion[jl.Double, Long].get.apply(jDouble(13.5)))
@Test def test_JD_L_1(): Unit = assertEquals(-14L, TypeCoercion[jl.Double, Long].get.apply(jDouble(-14.5)))
@Test def test_JD_Sh_0(): Unit = assertEquals(jShort(13), TypeCoercion[jl.Double, Short].get.apply(jDouble(13.5)))
@Test def test_JD_Sh_1(): Unit = assertEquals(jShort(-14), TypeCoercion[jl.Double, Short].get.apply(jDouble(-14.5)))
@Test def test_JD_St_0(): Unit = assertEquals("13.5", TypeCoercion[jl.Double, String].get.apply(jDouble(13.5)))
@Test def test_JD_St_1(): Unit = assertEquals("-14.5", TypeCoercion[jl.Double, String].get.apply(jDouble(-14.5)))
@Test def test_JF_By_0(): Unit = assertEquals(12.toByte, TypeCoercion[jl.Float, Byte].get.apply(jFloat(12.499f)))
@Test def test_JF_By_1(): Unit = assertEquals((-13).toByte, TypeCoercion[jl.Float, Byte].get.apply(jFloat(-13.499f)))
@Test def test_JF_C_0(): Unit = assertEquals(12.toChar, TypeCoercion[jl.Float, Char].get.apply(jFloat(12.499f)))
@Test def test_JF_C_1(): Unit = assertEquals((-13).toChar, TypeCoercion[jl.Float, Char].get.apply(jFloat(-13.499f)))
@Test def test_JF_D_0(): Unit = assertEquals(12.499d, TypeCoercion[jl.Float, Double].get.apply(jFloat(12.499f)), Precision)
@Test def test_JF_D_1(): Unit = assertEquals(-13.499d, TypeCoercion[jl.Float, Double].get.apply(jFloat(-13.499f)), Precision)
@Test def test_JF_F_0(): Unit = assertEquals(12.499f, TypeCoercion[jl.Float, Float].get.apply(jFloat(12.499f)), 0)
@Test def test_JF_F_1(): Unit = assertEquals(-13.499f, TypeCoercion[jl.Float, Float].get.apply(jFloat(-13.499f)), 0)
@Test def test_JF_I_0(): Unit = assertEquals(12, TypeCoercion[jl.Float, Int].get.apply(jFloat(12.499f)))
@Test def test_JF_I_1(): Unit = assertEquals(-13, TypeCoercion[jl.Float, Int].get.apply(jFloat(-13.499f)))
@Test def test_JF_JBy_0(): Unit = assertEquals(jByte(12), TypeCoercion[jl.Float, jl.Byte].get.apply(jFloat(12.499f)))
@Test def test_JF_JBy_1(): Unit = assertEquals(jByte(-13), TypeCoercion[jl.Float, jl.Byte].get.apply(jFloat(-13.499f)))
@Test def test_JF_JC_0(): Unit = assertEquals(jChar(12), TypeCoercion[jl.Float, jl.Character].get.apply(jFloat(12.499f)))
@Test def test_JF_JC_1(): Unit = assertEquals(jChar(-13), TypeCoercion[jl.Float, jl.Character].get.apply(jFloat(-13.499f)))
@Test def test_JF_JD_0(): Unit = assertEquals(jDouble(12.499).doubleValue, TypeCoercion[jl.Float, jl.Double].get.apply(jFloat(12.499f)).doubleValue, Precision)
@Test def test_JF_JD_1(): Unit = assertEquals(jDouble(-13.499).doubleValue, TypeCoercion[jl.Float, jl.Double].get.apply(jFloat(-13.499f)).doubleValue, Precision)
@Test def test_JF_JF_0(): Unit = assertEquals(jFloat(12.499f).floatValue, TypeCoercion[jl.Float, jl.Float].get.apply(jFloat(12.499f)).floatValue, 0)
@Test def test_JF_JF_1(): Unit = assertEquals(jFloat(-13.499f).floatValue, TypeCoercion[jl.Float, jl.Float].get.apply(jFloat(-13.499f)).floatValue, 0)
@Test def test_JF_JI_0(): Unit = assertEquals(jInt(12), TypeCoercion[jl.Float, jl.Integer].get.apply(jFloat(12.499f)))
@Test def test_JF_JI_1(): Unit = assertEquals(jInt(-13), TypeCoercion[jl.Float, jl.Integer].get.apply(jFloat(-13.499f)))
@Test def test_JF_JL_0(): Unit = assertEquals(jLong(12L), TypeCoercion[jl.Float, jl.Long].get.apply(jFloat(12.499f)))
@Test def test_JF_JL_1(): Unit = assertEquals(jLong(-13L), TypeCoercion[jl.Float, jl.Long].get.apply(jFloat(-13.499f)))
@Test def test_JF_JSh_0(): Unit = assertEquals(jShort(12), TypeCoercion[jl.Float, jl.Short].get.apply(jFloat(12.499f)))
@Test def test_JF_JSh_1(): Unit = assertEquals(jShort(-13), TypeCoercion[jl.Float, jl.Short].get.apply(jFloat(-13.499f)))
@Test def test_JF_L_0(): Unit = assertEquals(12L, TypeCoercion[jl.Float, Long].get.apply(jFloat(12.499f)))
@Test def test_JF_L_1(): Unit = assertEquals(-13L, TypeCoercion[jl.Float, Long].get.apply(jFloat(-13.499f)))
@Test def test_JF_Sh_0(): Unit = assertEquals(12.toShort, TypeCoercion[jl.Float, Short].get.apply(jFloat(12.499f)))
@Test def test_JF_Sh_1(): Unit = assertEquals((-13).toShort, TypeCoercion[jl.Float, Short].get.apply(jFloat(-13.499f)))
@Test def test_JF_St_0(): Unit = assertEquals("12.499", TypeCoercion[jl.Float, String].get.apply(jFloat(12.499f)))
@Test def test_JF_St_1(): Unit = assertEquals("-13.499", TypeCoercion[jl.Float, String].get.apply(jFloat(-13.499f)))
@Test def test_JI_By_0(): Unit = assertEquals(10.toByte, TypeCoercion[jl.Integer, Byte].get.apply(jInt(10)))
@Test def test_JI_By_1(): Unit = assertEquals((-11).toByte, TypeCoercion[jl.Integer, Byte].get.apply(jInt(-11)))
@Test def test_JI_C_0(): Unit = assertEquals(10.toChar, TypeCoercion[jl.Integer, Char].get.apply(jInt(10)))
@Test def test_JI_C_1(): Unit = assertEquals((-11).toChar, TypeCoercion[jl.Integer, Char].get.apply(jInt(-11)))
@Test def test_JI_D_0(): Unit = assertEquals(10d, TypeCoercion[jl.Integer, Double].get.apply(jInt(10)), 0)
@Test def test_JI_D_1(): Unit = assertEquals(-11d, TypeCoercion[jl.Integer, Double].get.apply(jInt(-11)), 0)
@Test def test_JI_F_0(): Unit = assertEquals(10f, TypeCoercion[jl.Integer, Float].get.apply(jInt(10)), 0)
@Test def test_JI_F_1(): Unit = assertEquals(-11f, TypeCoercion[jl.Integer, Float].get.apply(jInt(-11)), 0)
@Test def test_JI_I_0(): Unit = assertEquals(10, TypeCoercion[jl.Integer, Int].get.apply(jInt(10)))
@Test def test_JI_I_1(): Unit = assertEquals(-11, TypeCoercion[jl.Integer, Int].get.apply(jInt(-11)))
@Test def test_JI_JBy_0(): Unit = assertEquals(jByte(10), TypeCoercion[jl.Integer, jl.Byte].get.apply(jInt(10)))
@Test def test_JI_JBy_1(): Unit = assertEquals(jByte(-11), TypeCoercion[jl.Integer, jl.Byte].get.apply(jInt(-11)))
@Test def test_JI_JC_0(): Unit = assertEquals(jChar(10), TypeCoercion[jl.Integer, jl.Character].get.apply(jInt(10)))
@Test def test_JI_JC_1(): Unit = assertEquals(jChar(-11), TypeCoercion[jl.Integer, jl.Character].get.apply(jInt(-11)))
@Test def test_JI_JD_0(): Unit = assertEquals(jDouble(10).doubleValue, TypeCoercion[jl.Integer, jl.Double].get.apply(jInt(10)).doubleValue, 0)
@Test def test_JI_JD_1(): Unit = assertEquals(jDouble(-11).doubleValue, TypeCoercion[jl.Integer, jl.Double].get.apply(jInt(-11)).doubleValue, 0)
@Test def test_JI_JF_0(): Unit = assertEquals(jFloat(10).floatValue, TypeCoercion[jl.Integer, jl.Float].get.apply(jInt(10)).floatValue, 0)
@Test def test_JI_JF_1(): Unit = assertEquals(jFloat(-11).floatValue, TypeCoercion[jl.Integer, jl.Float].get.apply(jInt(-11)).floatValue, 0)
@Test def test_JI_JI_0(): Unit = assertEquals(jInt(10), TypeCoercion[jl.Integer, jl.Integer].get.apply(jInt(10)))
@Test def test_JI_JI_1(): Unit = assertEquals(jInt(-11), TypeCoercion[jl.Integer, jl.Integer].get.apply(jInt(-11)))
@Test def test_JI_JL_0(): Unit = assertEquals(jLong(10), TypeCoercion[jl.Integer, jl.Long].get.apply(jInt(10)))
@Test def test_JI_JL_1(): Unit = assertEquals(jLong(-11), TypeCoercion[jl.Integer, jl.Long].get.apply(jInt(-11)))
@Test def test_JI_JSh_0(): Unit = assertEquals(jShort(10), TypeCoercion[jl.Integer, jl.Short].get.apply(jInt(10)))
@Test def test_JI_JSh_1(): Unit = assertEquals(jShort(-11), TypeCoercion[jl.Integer, jl.Short].get.apply(jInt(-11)))
@Test def test_JI_L_0(): Unit = assertEquals(10L, TypeCoercion[jl.Integer, Long].get.apply(jInt(10)))
@Test def test_JI_L_1(): Unit = assertEquals(-11L, TypeCoercion[jl.Integer, Long].get.apply(jInt(-11)))
@Test def test_JI_Sh_0(): Unit = assertEquals(10.toShort, TypeCoercion[jl.Integer, Short].get.apply(jInt(10)))
@Test def test_JI_Sh_1(): Unit = assertEquals(-11.toShort, TypeCoercion[jl.Integer, Short].get.apply(jInt(-11)))
@Test def test_JI_St_0(): Unit = assertEquals("10", TypeCoercion[jl.Integer, String].get.apply(jInt(10)))
@Test def test_JI_St_1(): Unit = assertEquals("-11", TypeCoercion[jl.Integer, String].get.apply(jInt(-11)))
@Test def test_JL_By_0(): Unit = assertEquals(11.toByte, TypeCoercion[jl.Long, Byte].get.apply(jLong(11)))
@Test def test_JL_By_1(): Unit = assertEquals((-12).toByte, TypeCoercion[jl.Long, Byte].get.apply(jLong(-12)))
@Test def test_JL_C_0(): Unit = assertEquals(11.toChar, TypeCoercion[jl.Long, Char].get.apply(jLong(11)))
@Test def test_JL_C_1(): Unit = assertEquals((-12).toChar, TypeCoercion[jl.Long, Char].get.apply(jLong(-12)))
@Test def test_JL_D_0(): Unit = assertEquals(11d, TypeCoercion[jl.Long, Double].get.apply(jLong(11)), 0)
@Test def test_JL_D_1(): Unit = assertEquals(-12d, TypeCoercion[jl.Long, Double].get.apply(jLong(-12)), 0)
@Test def test_JL_F_0(): Unit = assertEquals(11f, TypeCoercion[jl.Long, Float].get.apply(jLong(11)), 0)
@Test def test_JL_F_1(): Unit = assertEquals(-12f, TypeCoercion[jl.Long, Float].get.apply(jLong(-12)), 0)
@Test def test_JL_I_0(): Unit = assertEquals(11, TypeCoercion[jl.Long, Int].get.apply(jLong(11)))
@Test def test_JL_I_1(): Unit = assertEquals(-12, TypeCoercion[jl.Long, Int].get.apply(jLong(-12)))
@Test def test_JL_JBy_0(): Unit = assertEquals(jByte(11), TypeCoercion[jl.Long, jl.Byte].get.apply(jLong(11)))
@Test def test_JL_JBy_1(): Unit = assertEquals(jByte(-12), TypeCoercion[jl.Long, jl.Byte].get.apply(jLong(-12)))
@Test def test_JL_JC_0(): Unit = assertEquals(jChar(11), TypeCoercion[jl.Long, jl.Character].get.apply(jLong(11)))
@Test def test_JL_JC_1(): Unit = assertEquals(jChar(-12), TypeCoercion[jl.Long, jl.Character].get.apply(jLong(-12)))
@Test def test_JL_JD_0(): Unit = assertEquals(jDouble(11), TypeCoercion[jl.Long, jl.Double].get.apply(jLong(11)))
@Test def test_JL_JD_1(): Unit = assertEquals(jDouble(-12), TypeCoercion[jl.Long, jl.Double].get.apply(jLong(-12)))
@Test def test_JL_JF_0(): Unit = assertEquals(jFloat(11), TypeCoercion[jl.Long, jl.Float].get.apply(jLong(11)))
@Test def test_JL_JF_1(): Unit = assertEquals(jFloat(-12), TypeCoercion[jl.Long, jl.Float].get.apply(jLong(-12)))
@Test def test_JL_JI_0(): Unit = assertEquals(jInt(11), TypeCoercion[jl.Long, jl.Integer].get.apply(jLong(11)))
@Test def test_JL_JI_1(): Unit = assertEquals(jInt(-12), TypeCoercion[jl.Long, jl.Integer].get.apply(jLong(-12)))
@Test def test_JL_JL_0(): Unit = assertEquals(jLong(11), TypeCoercion[jl.Long, jl.Long].get.apply(jLong(11)))
@Test def test_JL_JL_1(): Unit = assertEquals(jLong(-12), TypeCoercion[jl.Long, jl.Long].get.apply(jLong(-12)))
@Test def test_JL_JSh_0(): Unit = assertEquals(jShort(11), TypeCoercion[jl.Long, jl.Short].get.apply(jLong(11)))
@Test def test_JL_JSh_1(): Unit = assertEquals(jShort(-12), TypeCoercion[jl.Long, jl.Short].get.apply(jLong(-12)))
@Test def test_JL_L_0(): Unit = assertEquals(11L, TypeCoercion[jl.Long, Long].get.apply(jLong(11)))
@Test def test_JL_L_1(): Unit = assertEquals(-12L, TypeCoercion[jl.Long, Long].get.apply(jLong(-12)))
@Test def test_JL_Sh_0(): Unit = assertEquals(11.toShort, TypeCoercion[jl.Long, Short].get.apply(jLong(11)))
@Test def test_JL_Sh_1(): Unit = assertEquals(-12.toShort, TypeCoercion[jl.Long, Short].get.apply(jLong(-12)))
@Test def test_JL_St_0(): Unit = assertEquals("11", TypeCoercion[jl.Long, String].get.apply(jLong(11)))
@Test def test_JL_St_1(): Unit = assertEquals("-12", TypeCoercion[jl.Long, String].get.apply(jLong(-12)))
@Test def test_JSh_By_0(): Unit = assertEquals(9.toByte, TypeCoercion[jl.Short, Byte].get.apply(jShort(9)))
@Test def test_JSh_By_1(): Unit = assertEquals(-10.toByte, TypeCoercion[jl.Short, Byte].get.apply(jShort(-10)))
@Test def test_JSh_C_0(): Unit = assertEquals(9.toChar, TypeCoercion[jl.Short, Char].get.apply(jShort(9)))
@Test def test_JSh_C_1(): Unit = assertEquals(-10.toChar, TypeCoercion[jl.Short, Char].get.apply(jShort(-10)))
@Test def test_JSh_D_0(): Unit = assertEquals(9d, TypeCoercion[jl.Short, Double].get.apply(jShort(9)), 0)
@Test def test_JSh_D_1(): Unit = assertEquals(-10d, TypeCoercion[jl.Short, Double].get.apply(jShort(-10)), 0)
@Test def test_JSh_F_0(): Unit = assertEquals(9f, TypeCoercion[jl.Short, Float].get.apply(jShort(9)), 0)
@Test def test_JSh_F_1(): Unit = assertEquals(-10f, TypeCoercion[jl.Short, Float].get.apply(jShort(-10)), 0)
@Test def test_JSh_I_0(): Unit = assertEquals(9, TypeCoercion[jl.Short, Int].get.apply(jShort(9)))
@Test def test_JSh_I_1(): Unit = assertEquals(-10, TypeCoercion[jl.Short, Int].get.apply(jShort(-10)))
@Test def test_JSh_JBy_0(): Unit = assertEquals(jByte(9), TypeCoercion[jl.Short, jl.Byte].get.apply(jShort(9)))
@Test def test_JSh_JBy_1(): Unit = assertEquals(jByte(-10), TypeCoercion[jl.Short, jl.Byte].get.apply(jShort(-10)))
@Test def test_JSh_JC_0(): Unit = assertEquals(jChar(9), TypeCoercion[jl.Short, jl.Character].get.apply(jShort(9)))
@Test def test_JSh_JC_1(): Unit = assertEquals(jChar(-10), TypeCoercion[jl.Short, jl.Character].get.apply(jShort(-10)))
@Test def test_JSh_JD_0(): Unit = assertEquals(jDouble(9).doubleValue, TypeCoercion[jl.Short, jl.Double].get.apply(jShort(9)).doubleValue, 0)
@Test def test_JSh_JD_1(): Unit = assertEquals(jDouble(-10).doubleValue, TypeCoercion[jl.Short, jl.Double].get.apply(jShort(-10)).doubleValue, 0)
@Test def test_JSh_JF_0(): Unit = assertEquals(jFloat(9).floatValue, TypeCoercion[jl.Short, jl.Float].get.apply(jShort(9)).floatValue, 0)
@Test def test_JSh_JF_1(): Unit = assertEquals(jFloat(-10).floatValue, TypeCoercion[jl.Short, jl.Float].get.apply(jShort(-10)).floatValue, 0)
@Test def test_JSh_JI_0(): Unit = assertEquals(jInt(9), TypeCoercion[jl.Short, jl.Integer].get.apply(jShort(9)))
@Test def test_JSh_JI_1(): Unit = assertEquals(jInt(-10), TypeCoercion[jl.Short, jl.Integer].get.apply(jShort(-10)))
@Test def test_JSh_JL_0(): Unit = assertEquals(jLong(9), TypeCoercion[jl.Short, jl.Long].get.apply(jShort(9)))
@Test def test_JSh_JL_1(): Unit = assertEquals(jLong(-10), TypeCoercion[jl.Short, jl.Long].get.apply(jShort(-10)))
@Test def test_JSh_JSh_0(): Unit = assertEquals(jShort(9), TypeCoercion[jl.Short, jl.Short].get.apply(jShort(9)))
@Test def test_JSh_JSh_1(): Unit = assertEquals(jShort(-10), TypeCoercion[jl.Short, jl.Short].get.apply(jShort(-10)))
@Test def test_JSh_L_0(): Unit = assertEquals(9L, TypeCoercion[jl.Short, Long].get.apply(jShort(9)))
@Test def test_JSh_L_1(): Unit = assertEquals(-10L, TypeCoercion[jl.Short, Long].get.apply(jShort(-10)))
@Test def test_JSh_Sh_0(): Unit = assertEquals(9.toShort, TypeCoercion[jl.Short, Short].get.apply(jShort(9)))
@Test def test_JSh_Sh_1(): Unit = assertEquals(-10.toShort, TypeCoercion[jl.Short, Short].get.apply(jShort(-10)))
@Test def test_JSh_St_0(): Unit = assertEquals("9", TypeCoercion[jl.Short, String].get.apply(jShort(9)))
@Test def test_JSh_St_1(): Unit = assertEquals("-10", TypeCoercion[jl.Short, String].get.apply(jShort(-10)))
@Test def test_L_By_0(): Unit = assertEquals(5.toByte, TypeCoercion[Long, Byte].get.apply(5L))
@Test def test_L_By_1(): Unit = assertEquals(-6.toByte, TypeCoercion[Long, Byte].get.apply(-6L))
@Test def test_L_C_0(): Unit = assertEquals(5.toChar, TypeCoercion[Long, Char].get.apply(5L))
@Test def test_L_C_1(): Unit = assertEquals((-6).toChar, TypeCoercion[Long, Char].get.apply(-6L))
@Test def test_L_D_0(): Unit = assertEquals(5d, TypeCoercion[Long, Double].get.apply(5L), 0)
@Test def test_L_D_1(): Unit = assertEquals(-6d, TypeCoercion[Long, Double].get.apply(-6L), 0)
@Test def test_L_F_0(): Unit = assertEquals(5f, TypeCoercion[Long, Float].get.apply(5L), 0)
@Test def test_L_F_1(): Unit = assertEquals(-6f, TypeCoercion[Long, Float].get.apply(-6L), 0)
@Test def test_L_I_0(): Unit = assertEquals(5, TypeCoercion[Long, Int].get.apply(5L))
@Test def test_L_I_1(): Unit = assertEquals(-6, TypeCoercion[Long, Int].get.apply(-6L))
@Test def test_L_JBy_0(): Unit = assertEquals(jByte(5), TypeCoercion[Long, jl.Byte].get.apply(5L))
@Test def test_L_JBy_1(): Unit = assertEquals(jByte(-6), TypeCoercion[Long, jl.Byte].get.apply(-6L))
@Test def test_L_JC_0(): Unit = assertEquals(jChar(5), TypeCoercion[Long, jl.Character].get.apply(5L))
@Test def test_L_JC_1(): Unit = assertEquals(jChar(-6), TypeCoercion[Long, jl.Character].get.apply(-6L))
@Test def test_L_JD_0(): Unit = assertEquals(jDouble(5).doubleValue, TypeCoercion[Long, jl.Double].get.apply(5L).doubleValue, 0)
@Test def test_L_JD_1(): Unit = assertEquals(jDouble(-6).doubleValue, TypeCoercion[Long, jl.Double].get.apply(-6L).doubleValue, 0)
@Test def test_L_JF_0(): Unit = assertEquals(jFloat(5).floatValue, TypeCoercion[Long, jl.Float].get.apply(5L).floatValue, 0)
@Test def test_L_JF_1(): Unit = assertEquals(jFloat(-6).floatValue, TypeCoercion[Long, jl.Float].get.apply(-6L).floatValue, 0)
@Test def test_L_JI_0(): Unit = assertEquals(jInt(5), TypeCoercion[Long, jl.Integer].get.apply(5L))
@Test def test_L_JI_1(): Unit = assertEquals(jInt(-6), TypeCoercion[Long, jl.Integer].get.apply(-6L))
@Test def test_L_JL_0(): Unit = assertEquals(jLong(5), TypeCoercion[Long, jl.Long].get.apply(5L))
@Test def test_L_JL_1(): Unit = assertEquals(jLong(-6), TypeCoercion[Long, jl.Long].get.apply(-6L))
@Test def test_L_JSh_0(): Unit = assertEquals(jShort(5), TypeCoercion[Long, jl.Short].get.apply(5L))
@Test def test_L_JSh_1(): Unit = assertEquals(jShort(-6), TypeCoercion[Long, jl.Short].get.apply(-6L))
@Test def test_L_L_0(): Unit = assertEquals(5L, TypeCoercion[Long, Long].get.apply(5L))
@Test def test_L_L_1(): Unit = assertEquals(-6L, TypeCoercion[Long, Long].get.apply(-6L))
@Test def test_L_Sh_0(): Unit = assertEquals(5.toShort, TypeCoercion[Long, Short].get.apply(5L))
@Test def test_L_Sh_1(): Unit = assertEquals(-6.toShort, TypeCoercion[Long, Short].get.apply(-6L))
@Test def test_L_St_0(): Unit = assertEquals("5", TypeCoercion[Long, String].get.apply(5L))
@Test def test_L_St_1(): Unit = assertEquals("-6", TypeCoercion[Long, String].get.apply(-6L))
@Test def test_Sh_By_0(): Unit = assertEquals(3.toByte, TypeCoercion[Short, Byte].get.apply(3.toShort))
@Test def test_Sh_By_1(): Unit = assertEquals((-4).toByte, TypeCoercion[Short, Byte].get.apply((-4).toShort))
@Test def test_Sh_C_0(): Unit = assertEquals(3.toChar, TypeCoercion[Short, Char].get.apply(3.toShort))
@Test def test_Sh_C_1(): Unit = assertEquals((-4).toChar, TypeCoercion[Short, Char].get.apply((-4).toShort))
@Test def test_Sh_D_0(): Unit = assertEquals(3d, TypeCoercion[Short, Double].get.apply(3.toShort), 0)
@Test def test_Sh_D_1(): Unit = assertEquals(-4d, TypeCoercion[Short, Double].get.apply((-4).toShort), 0)
@Test def test_Sh_F_0(): Unit = assertEquals(3f, TypeCoercion[Short, Float].get.apply(3.toShort), 0)
@Test def test_Sh_F_1(): Unit = assertEquals(-4f, TypeCoercion[Short, Float].get.apply((-4).toShort), 0)
@Test def test_Sh_I_0(): Unit = assertEquals(3, TypeCoercion[Short, Int].get.apply(3.toShort))
@Test def test_Sh_I_1(): Unit = assertEquals(-4, TypeCoercion[Short, Int].get.apply((-4).toShort))
@Test def test_Sh_JBy_0(): Unit = assertEquals(jByte(3), TypeCoercion[Short, jl.Byte].get.apply(3.toShort))
@Test def test_Sh_JBy_1(): Unit = assertEquals(jByte(-4), TypeCoercion[Short, jl.Byte].get.apply((-4).toShort))
@Test def test_Sh_JC_0(): Unit = assertEquals(jChar(3), TypeCoercion[Short, jl.Character].get.apply(3.toShort))
@Test def test_Sh_JC_1(): Unit = assertEquals(jChar(-4), TypeCoercion[Short, jl.Character].get.apply((-4).toShort))
@Test def test_Sh_JD_0(): Unit = assertEquals(jDouble(3).doubleValue, TypeCoercion[Short, jl.Double].get.apply(3.toShort).doubleValue, 0)
@Test def test_Sh_JD_1(): Unit = assertEquals(jDouble(-4).doubleValue, TypeCoercion[Short, jl.Double].get.apply((-4).toShort).doubleValue, 0)
@Test def test_Sh_JF_0(): Unit = assertEquals(jFloat(3).floatValue, TypeCoercion[Short, jl.Float].get.apply(3.toShort).floatValue, 0)
@Test def test_Sh_JF_1(): Unit = assertEquals(jFloat(-4).floatValue, TypeCoercion[Short, jl.Float].get.apply((-4).toShort).floatValue, 0)
@Test def test_Sh_JI_0(): Unit = assertEquals(jInt(3), TypeCoercion[Short, jl.Integer].get.apply(3.toShort))
@Test def test_Sh_JI_1(): Unit = assertEquals(jInt(-4), TypeCoercion[Short, jl.Integer].get.apply((-4).toShort))
@Test def test_Sh_JL_0(): Unit = assertEquals(jLong(3), TypeCoercion[Short, jl.Long].get.apply(3.toShort))
@Test def test_Sh_JL_1(): Unit = assertEquals(jLong(-4), TypeCoercion[Short, jl.Long].get.apply((-4).toShort))
@Test def test_Sh_JSh_0(): Unit = assertEquals(jShort(3), TypeCoercion[Short, jl.Short].get.apply(3.toShort))
@Test def test_Sh_JSh_1(): Unit = assertEquals(jShort(-4), TypeCoercion[Short, jl.Short].get.apply((-4).toShort))
@Test def test_Sh_L_0(): Unit = assertEquals(3L, TypeCoercion[Short, Long].get.apply(3.toShort))
@Test def test_Sh_L_1(): Unit = assertEquals(-4L, TypeCoercion[Short, Long].get.apply((-4).toShort))
@Test def test_Sh_Sh_0(): Unit = assertEquals(3.toShort, TypeCoercion[Short, Short].get.apply(3.toShort))
@Test def test_Sh_Sh_1(): Unit = assertEquals((-4).toShort, TypeCoercion[Short, Short].get.apply((-4).toShort))
@Test def test_Sh_St_0(): Unit = assertEquals("3", TypeCoercion[Short, String].get.apply(3.toShort))
@Test def test_Sh_St_1(): Unit = assertEquals("-4", TypeCoercion[Short, String].get.apply((-4).toShort))
@Test def test_St_St_0(): Unit = assertEquals("31", TypeCoercion[String, String].get.apply("31"))
@Test def test_St_St_1(): Unit = assertEquals("-1", TypeCoercion[String, String].get.apply("-1"))
@Test def test_St_St_2(): Unit = assertEquals("true", TypeCoercion[String, String].get.apply("true"))
@Test def test_St_St_3(): Unit = assertEquals("false", TypeCoercion[String, String].get.apply("false"))
@Test def testNoCoercion_Bo_C(): Unit = assertTrue(TypeCoercion[Boolean, Char].isEmpty)
@Test def testNoCoercion_Bo_By(): Unit = assertTrue(TypeCoercion[Boolean, Byte].isEmpty)
@Test def testNoCoercion_Bo_Sh(): Unit = assertTrue(TypeCoercion[Boolean, Short].isEmpty)
@Test def testNoCoercion_Bo_I(): Unit = assertTrue(TypeCoercion[Boolean, Int].isEmpty)
@Test def testNoCoercion_Bo_L(): Unit = assertTrue(TypeCoercion[Boolean, Long].isEmpty)
@Test def testNoCoercion_Bo_F(): Unit = assertTrue(TypeCoercion[Boolean, Float].isEmpty)
@Test def testNoCoercion_Bo_D(): Unit = assertTrue(TypeCoercion[Boolean, Double].isEmpty)
@Test def testNoCoercion_Bo_JBy(): Unit = assertTrue(TypeCoercion[Boolean, jl.Byte].isEmpty)
@Test def testNoCoercion_Bo_JSh(): Unit = assertTrue(TypeCoercion[Boolean, jl.Short].isEmpty)
@Test def testNoCoercion_Bo_JI(): Unit = assertTrue(TypeCoercion[Boolean, jl.Integer].isEmpty)
@Test def testNoCoercion_Bo_JL(): Unit = assertTrue(TypeCoercion[Boolean, jl.Long].isEmpty)
@Test def testNoCoercion_Bo_JF(): Unit = assertTrue(TypeCoercion[Boolean, jl.Float].isEmpty)
@Test def testNoCoercion_Bo_JD(): Unit = assertTrue(TypeCoercion[Boolean, jl.Double].isEmpty)
@Test def testNoCoercion_Bo_JC(): Unit = assertTrue(TypeCoercion[Boolean, jl.Character].isEmpty)
@Test def testNoCoercion_C_Bo(): Unit = assertTrue(TypeCoercion[Char, Boolean].isEmpty)
@Test def testNoCoercion_C_JBo(): Unit = assertTrue(TypeCoercion[Char, jl.Boolean].isEmpty)
@Test def testNoCoercion_By_Bo(): Unit = assertTrue(TypeCoercion[Byte, Boolean].isEmpty)
@Test def testNoCoercion_By_JBo(): Unit = assertTrue(TypeCoercion[Byte, jl.Boolean].isEmpty)
@Test def testNoCoercion_Sh_Bo(): Unit = assertTrue(TypeCoercion[Short, Boolean].isEmpty)
@Test def testNoCoercion_Sh_JBo(): Unit = assertTrue(TypeCoercion[Short, jl.Boolean].isEmpty)
@Test def testNoCoercion_I_Bo(): Unit = assertTrue(TypeCoercion[Int, Boolean].isEmpty)
@Test def testNoCoercion_I_JBo(): Unit = assertTrue(TypeCoercion[Int, jl.Boolean].isEmpty)
@Test def testNoCoercion_L_Bo(): Unit = assertTrue(TypeCoercion[Long, Boolean].isEmpty)
@Test def testNoCoercion_L_JBo(): Unit = assertTrue(TypeCoercion[Long, jl.Boolean].isEmpty)
@Test def testNoCoercion_F_Bo(): Unit = assertTrue(TypeCoercion[Float, Boolean].isEmpty)
@Test def testNoCoercion_F_JBo(): Unit = assertTrue(TypeCoercion[Float, jl.Boolean].isEmpty)
@Test def testNoCoercion_D_Bo(): Unit = assertTrue(TypeCoercion[Double, Boolean].isEmpty)
@Test def testNoCoercion_D_JBo(): Unit = assertTrue(TypeCoercion[Double, jl.Boolean].isEmpty)
@Test def testNoCoercion_JBy_Bo(): Unit = assertTrue(TypeCoercion[jl.Byte, Boolean].isEmpty)
@Test def testNoCoercion_JBy_JBo(): Unit = assertTrue(TypeCoercion[jl.Byte, jl.Boolean].isEmpty)
@Test def testNoCoercion_JSh_Bo(): Unit = assertTrue(TypeCoercion[jl.Short, Boolean].isEmpty)
@Test def testNoCoercion_JSh_JBo(): Unit = assertTrue(TypeCoercion[jl.Short, jl.Boolean].isEmpty)
@Test def testNoCoercion_JI_Bo(): Unit = assertTrue(TypeCoercion[jl.Integer, Boolean].isEmpty)
@Test def testNoCoercion_JI_JBo(): Unit = assertTrue(TypeCoercion[jl.Integer, jl.Boolean].isEmpty)
@Test def testNoCoercion_JL_Bo(): Unit = assertTrue(TypeCoercion[jl.Long, Boolean].isEmpty)
@Test def testNoCoercion_JL_JBo(): Unit = assertTrue(TypeCoercion[jl.Long, jl.Boolean].isEmpty)
@Test def testNoCoercion_JF_Bo(): Unit = assertTrue(TypeCoercion[jl.Float, Boolean].isEmpty)
@Test def testNoCoercion_JF_JBo(): Unit = assertTrue(TypeCoercion[jl.Float, jl.Boolean].isEmpty)
@Test def testNoCoercion_JD_Bo(): Unit = assertTrue(TypeCoercion[jl.Double, Boolean].isEmpty)
@Test def testNoCoercion_JD_JBo(): Unit = assertTrue(TypeCoercion[jl.Double, jl.Boolean].isEmpty)
@Test def testNoCoercion_JC_Bo(): Unit = assertTrue(TypeCoercion[jl.Character, Boolean].isEmpty)
@Test def testNoCoercion_JC_JBo(): Unit = assertTrue(TypeCoercion[jl.Character, jl.Boolean].isEmpty)
@Test def testNoCoercion_JBo_C(): Unit = assertTrue(TypeCoercion[jl.Boolean, Char].isEmpty)
@Test def testNoCoercion_JBo_By(): Unit = assertTrue(TypeCoercion[jl.Boolean, Byte].isEmpty)
@Test def testNoCoercion_JBo_Sh(): Unit = assertTrue(TypeCoercion[jl.Boolean, Short].isEmpty)
@Test def testNoCoercion_JBo_I(): Unit = assertTrue(TypeCoercion[jl.Boolean, Int].isEmpty)
@Test def testNoCoercion_JBo_L(): Unit = assertTrue(TypeCoercion[jl.Boolean, Long].isEmpty)
@Test def testNoCoercion_JBo_F(): Unit = assertTrue(TypeCoercion[jl.Boolean, Float].isEmpty)
@Test def testNoCoercion_JBo_D(): Unit = assertTrue(TypeCoercion[jl.Boolean, Double].isEmpty)
@Test def testNoCoercion_JBo_JBy(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Byte].isEmpty)
@Test def testNoCoercion_JBo_JSh(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Short].isEmpty)
@Test def testNoCoercion_JBo_JI(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Integer].isEmpty)
@Test def testNoCoercion_JBo_JL(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Long].isEmpty)
@Test def testNoCoercion_JBo_JF(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Float].isEmpty)
@Test def testNoCoercion_JBo_JD(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Double].isEmpty)
@Test def testNoCoercion_JBo_JC(): Unit = assertTrue(TypeCoercion[jl.Boolean, jl.Character].isEmpty)
@Test def testNoCoercion_St_C(): Unit = assertTrue(TypeCoercion[String, Char].isEmpty)
@Test def testNoCoercion_St_By(): Unit = assertTrue(TypeCoercion[String, Byte].isEmpty)
@Test def testNoCoercion_St_Sh(): Unit = assertTrue(TypeCoercion[String, Short].isEmpty)
@Test def testNoCoercion_St_I(): Unit = assertTrue(TypeCoercion[String, Int].isEmpty)
@Test def testNoCoercion_St_L(): Unit = assertTrue(TypeCoercion[String, Long].isEmpty)
@Test def testNoCoercion_St_F(): Unit = assertTrue(TypeCoercion[String, Float].isEmpty)
@Test def testNoCoercion_St_D(): Unit = assertTrue(TypeCoercion[String, Double].isEmpty)
@Test def testNoCoercion_St_JBy(): Unit = assertTrue(TypeCoercion[String, jl.Byte].isEmpty)
@Test def testNoCoercion_St_JSh(): Unit = assertTrue(TypeCoercion[String, jl.Short].isEmpty)
@Test def testNoCoercion_St_JI(): Unit = assertTrue(TypeCoercion[String, jl.Integer].isEmpty)
@Test def testNoCoercion_St_JL(): Unit = assertTrue(TypeCoercion[String, jl.Long].isEmpty)
@Test def testNoCoercion_St_JF(): Unit = assertTrue(TypeCoercion[String, jl.Float].isEmpty)
@Test def testNoCoercion_St_JD(): Unit = assertTrue(TypeCoercion[String, jl.Double].isEmpty)
@Test def testNoCoercion_St_JC(): Unit = assertTrue(TypeCoercion[String, jl.Character].isEmpty)
@Test def testNoCoercion_St_JBo(): Unit = assertTrue(TypeCoercion[String, jl.Boolean].isEmpty)
/**
* Take the matrix from the comment and ensure that a coercion exists exactly when the matrix
* says it should. This doesn't test that the conversions themselves are correct.
*/
@Test def testRawCoercionsExist(): Unit = {
val funcs = for {
from <- matrix.indices
to <- matrix(from).indices
shouldExist = matrix(from)(to)
f = types(from)
t = types(to)
doesExist = TypeCoercion(f, t).isDefined
ok = shouldExist == doesExist // expected and actual existence must agree (p <-> q)
} yield (shouldExist, ok, s"${RefInfoOps.toString(f)} => ${RefInfoOps.toString(t)}")
debug(s"Found ${funcs.count(_._1)} coercion functions.")
val missing = funcs.collect { case (_, ok, f) if !ok => f }
assertTrue(s"Found ${missing.size} functions missing: ${missing.mkString("\n\t", "\n\t", "")}", missing.isEmpty)
}
/**
* Take the matrix from the comment and ensure that a coercion exists exactly when the matrix
* says it should. This doesn't test that the conversions themselves are correct. It checks
* coercions of the form:
*
* A => Option[B], where A and B are not options.
*/
@Test def testToOptCoercionsExist(): Unit = {
val funcs = for {
from <- matrix.indices
to <- matrix(from).indices
shouldExist = matrix(from)(to)
f = types(from)
t = RefInfoOps.option(types(to))
doesExist = TypeCoercion(f, t).isDefined
ok = shouldExist == doesExist // expected and actual existence must agree (p <-> q)
} yield (shouldExist, ok, s"${RefInfoOps.toString(f)} => ${RefInfoOps.toString(t)}")
debug(s"Found ${funcs.count(_._1)} toOption coercion functions.")
val missing = funcs.collect { case (_, ok, f) if !ok => f }
assertTrue(s"Found ${missing.size} toOption functions missing: ${missing.mkString("\n\t", "\n\t", "")}", missing.isEmpty)
}
/**
* Take the matrix from the comment and ensure that a coercion exists exactly when the matrix
* says it should. This doesn't test that the conversions themselves are correct. It checks
* coercions of the form:
*
* Option[A] => Option[B], where A and B are not options.
*/
@Test def testOptOptCoercionsExist(): Unit = {
val funcs = for {
from <- matrix.indices
to <- matrix(from).indices
shouldExist = matrix(from)(to)
f = RefInfoOps.option(types(from))
t = RefInfoOps.option(types(to))
doesExist = TypeCoercion(f, t).isDefined
ok = shouldExist == doesExist // expected and actual existence must agree (p <-> q)
} yield (shouldExist, ok, s"${RefInfoOps.toString(f)} => ${RefInfoOps.toString(t)}")
debug(s"Found ${funcs.count(_._1)} Option to Option coercion functions.")
val missing = funcs.collect { case (_, ok, f) if !ok => f }
assertTrue(s"Found ${missing.size} Option to Option functions missing: ${missing.mkString("\n\t", "\n\t", "")}", missing.isEmpty)
}
private[this] def jChar(v: Int) = jl.Character.valueOf(v.toChar)
private[this] def jByte(v: Int) = jl.Byte.valueOf(v.toByte)
private[this] def jShort(v: Int) = jl.Short.valueOf(v.toShort)
private[this] def jInt(v: Int) = jl.Integer.valueOf(v)
private[this] def jLong(v: Long) = jl.Long.valueOf(v)
private[this] def jFloat(v: Float) = jl.Float.valueOf(v)
private[this] def jDouble(v: Double) = jl.Double.valueOf(v)
// NOTE: The generated tests above are based on the following programmatically generated code, which was then hand-modified.
// @Test def xyz() {
// val valMap = vals.toMap
// for {
// (from, fi) <- types.zipWithIndex
// (to, ti) <- types.zipWithIndex
// if matrix(fi)(ti)
// (in, n) <- valMap(from).zipWithIndex
// fromAbbrev = typeToAbbrev(from)
// toAbbrev = typeToAbbrev(to)
// fS = RefInfoOps.toString(from)
// tS = RefInfoOps.toString(to)
// // tc = TypeCoercion(from, to).asInstanceOf[Option[Any => Any]].get
// // y = tc(in.asInstanceOf[Any])
// } {
// println(s"""@Test def test_${fromAbbrev}_${toAbbrev}_$n(): Unit = assertEquals($in, TypeCoercion[$fS, $tS].get.apply($in))""")
// }
// }
// @Test def test(): Unit = {
// val data =
// """
// |Bo C By Sh I L F D JBy JSh JI JL JF JD JC
// |C Bo JBo
// |By Bo JBo
// |Sh Bo JBo
// |I Bo JBo
// |L Bo JBo
// |F Bo JBo
// |D Bo JBo
// |JBy Bo JBo
// |JSh Bo JBo
// |JI Bo JBo
// |JL Bo JBo
// |JF Bo JBo
// |JD Bo JBo
// |JC Bo JBo
// |JBo C By Sh I L F D JBy JSh JI JL JF JD JC
// |St C By Sh I L F D JBy JSh JI JL JF JD JC JBo
// """.stripMargin.trim
//
// val typeMap = TypeCoercionTest.types.toMap
//
// data.split("\n").map(_.split(" ").toList).foreach { case from :: toLst =>
// val f = typeMap(from)
// toLst foreach { ts =>
// val t = typeMap(ts)
// val a = RefInfoOps.toString(f).replaceAll("java.lang.String", "String")
// val b = RefInfoOps.toString(t).replaceAll("java.lang.String", "String")
// val fn = s"@Test def testNoCoercion_${from}_$ts(): Unit = assertTrue(TypeCoercion[$a, $b].isEmpty)"
// println(fn)
// }
// }
// }
}
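// ---------------------------------------------------------------------------
// A minimal sketch of the API exercised above (assumption: TypeCoercion.apply
// yields an Option of a plain function; only the call shapes used in the tests
// are taken as given):
//
//   val toLong: Option[Int => Long] = TypeCoercion[Int, Long]
//   toLong.map(f => f(4))       // Some(4L): the matrix's I row has an entry under L
//   TypeCoercion[String, Int]   // None: the St row maps only to St (see testNoCoercion_St_I)
// ---------------------------------------------------------------------------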
private object TypeCoercionTest {
val Precision = 1e-6
/**
* This matrix is copied from the scaladoc for TypeCoercion.
*/
val matrixStr =
"""
| * Bo C By Sh I L F D JBy JSh JI JL JF JD JC JBo St
| * +--------------------------------------------------------------------
| * Bo | I bB tS
| * C | I A A A A A A A A A A A A A tS
| * By | A I A A A A A A A A A A A A tS
| * Sh | A A I A A A A A A A A A A A tS
| * I | A A A I A A A A A A A A A A tS
| * L | A A A A I A A A A A A A A A tS
| * F F | A A A A A I A A A A A A A A tS
| * R D | A A A A A A I A A A A A A A tS
| * O JBy | N N N N N N N I N N N N N N tS
| * M JSh | N N N N N N N N I N N N N N tS
| * JI | N N N N N N N N N I N N N N tS
| * JL | N N N N N N N N N N I N N N tS
| * JF | N N N N N N N N N N N I N N tS
| * JD | N N N N N N N N N N N N I N tS
| * JC | uC I tS
| * JBo | uB I tS
| * St | I
""".stripMargin.trim
// val matrixStr =
// """
// | * Bo C By Sh I L F D JBy JSh JI JL JF JD JC JBo St
// | * +--------------------------------------------------------------------
// | * Bo | I bB tS
// | * C | I A A A A A A A A A A A A A tS
// | * By | A I A A A A A A A A A A A A tS
// | * Sh | A A I A A A A A A A A A A A tS
// | * I | A A A I A A A A A A A A A A tS
// | * L | A A A A I A A A A A A A A A tS
// | * F F | A A A A A I A A A A A A A A tS
// | * R D | A A A A A A I A A A A A A A tS
// | * O JBy | N N N N N N N I N N N N N N tS
// | * M JSh | N N N N N N N N I N N N N N tS
// | * JI | N N N N N N N N N I N N N N tS
// | * JL | N N N N N N N N N N I N N N tS
// | * JF | N N N N N N N N N N N I N N tS
// | * JD | N N N N N N N N N N N N I N tS
// | * JC | uC I tS
// | * JBo | uB I tS
// | * St | fS fS fS fS fS fS fS fS fS fS fS fS fS fS I
// """.stripMargin.trim
val types = Vector(
"Bo" -> RefInfo.Boolean,
"C" -> RefInfo.Char,
"By" -> RefInfo.Byte,
"Sh" -> RefInfo.Short,
"I" -> RefInfo.Int,
"L" -> RefInfo.Long,
"F" -> RefInfo.Float,
"D" -> RefInfo.Double,
"JBy" -> RefInfo.JavaByte,
"JSh" -> RefInfo.JavaShort,
"JI" -> RefInfo.JavaInteger,
"JL" -> RefInfo.JavaLong,
"JF" -> RefInfo.JavaFloat,
"JD" -> RefInfo.JavaDouble,
"JC" -> RefInfo.JavaCharacter,
"JBo" -> RefInfo.JavaBoolean,
"St" -> RefInfo.String
)
// Was used to generate tests.
val typeToAbbrev = Map(
RefInfo.Boolean -> "Bo",
RefInfo.Char -> "C",
RefInfo.Byte -> "By",
RefInfo.Short -> "Sh",
RefInfo.Int -> "I",
RefInfo.Long -> "L",
RefInfo.Float -> "F",
RefInfo.Double -> "D",
RefInfo.JavaByte -> "JBy",
RefInfo.JavaShort -> "JSh",
RefInfo.JavaInteger -> "JI",
RefInfo.JavaLong -> "JL",
RefInfo.JavaFloat -> "JF",
RefInfo.JavaDouble -> "JD",
RefInfo.JavaCharacter -> "JC",
RefInfo.JavaBoolean -> "JBo",
RefInfo.String -> "St"
)
val vals = Vector(
RefInfo.Boolean -> Seq("true", "false"),
RefInfo.Char -> Seq("'2'", "'3'"),
RefInfo.Byte -> Seq("2.toByte", "(-3).toByte"),
RefInfo.Short -> Seq("3.toShort", "(-4).toShort"),
RefInfo.Int -> Seq("4", "-5"),
RefInfo.Long -> Seq("5L", "-6L"),
RefInfo.Float -> Seq("6.499f", "-7.499f"),
RefInfo.Double -> Seq("7.5", "-8.5"),
RefInfo.JavaByte -> Seq("java.lang.Byte.valueOf(8.toByte)", "java.lang.Byte.valueOf((-9).toByte)"),
RefInfo.JavaShort -> Seq("java.lang.Short.valueOf(9.toShort)", "java.lang.Short.valueOf((-10).toShort)"),
RefInfo.JavaInteger -> Seq("java.lang.Integer.valueOf(10)", "java.lang.Integer.valueOf(-11)"),
RefInfo.JavaLong -> Seq("java.lang.Long.valueOf(11)", "java.lang.Long.valueOf(-12)"),
RefInfo.JavaFloat -> Seq("java.lang.Float.valueOf(12.499f)", "java.lang.Float.valueOf(-13.499f)"),
RefInfo.JavaDouble -> Seq("java.lang.Double.valueOf(13.5)", "java.lang.Double.valueOf(-14.5)"),
RefInfo.JavaCharacter -> Seq("java.lang.Character.valueOf('4')", "java.lang.Character.valueOf('5')"),
RefInfo.JavaBoolean -> Seq("java.lang.Boolean.FALSE", "java.lang.Boolean.TRUE"),
RefInfo.String -> Seq("\"31\"", "\"-1\"", "\"true\"", "\"false\"")
)
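// Each (type, literal) pair above is spliced verbatim into a generated assertion by the
// println template in the commented-out generator in the test class above, e.g.
// (RefInfo.Double, "7.5") renders as
//   @Test def test_D_D_0(): Unit = assertEquals(7.5, TypeCoercion[Double, Double].get.apply(7.5))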
def matrixAndTypes() = {
val typeMap = types.toMap
val lines = matrixStr.split("\n")
val headerTypes = getHeaders(lines(0)).map(typeMap.apply)
// Start the range at 2: skip the header row and the row that's just a horizontal bar.
val m = (2 until lines.size) map { i => getLine(lines(i)) }
(m, headerTypes)
}
def getHeaders(h: String) = h.drop(2).trim.split(" +").toVector // drop the leading " *", split on runs of spaces
def getLine(l: String) = l.substring(l.indexOf("|") + 2).grouped(4).map(_.trim.nonEmpty).toVector // fixed-width 4-char cells; non-blank means a coercion is expected
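// Parsing sketch (hypothetical row, for illustration only): given
//   " * By  |     A   I   A ..."
// getLine drops everything through the "|" and the following space, slices the rest into
// 4-character groups, and maps each group to whether it is non-blank, yielding one
// Boolean per matrix column.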
}
| eHarmony/aloha | aloha-core/src/test/scala/com/eharmony/aloha/models/TypeCoercionTest.scala | Scala | mit | 68,004 |
package akka.dispatch
import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory
import org.scalatest.FunSuite
/**
* Created by huanwuji on 2017/2/21.
*/
class SizeScaleThreadPoolExecutorConfiguratorTest extends FunSuite {
test("init") {
val config = ConfigFactory.load("instance.conf")
val system = ActorSystem("test", config)
val dispatcher = system.dispatchers.lookup("akka.teleporter.blocking-dispatcher")
// lookup() throws if the dispatcher id is missing from the config, so a non-null
// result here means the custom configurator was instantiated and wired in.
assert(dispatcher != null)
}
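// A plausible shape for the dispatcher block in instance.conf (assumption: the
// configurator plugs into Akka's standard `executor` hook; the fully-qualified
// class name is inferred from this file's package and is not confirmed here):
//
//   akka.teleporter.blocking-dispatcher {
//     type = Dispatcher
//     executor = "akka.dispatch.SizeScaleThreadPoolExecutorConfigurator"
//   }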
} | huanwuji/teleporter | src/test/scala/akka/dispatch/SizeScaleThreadPoolExecutorConfiguratorTest.scala | Scala | agpl-3.0 | 469 |
package com.yetu.oauth2provider.base
import com.yetu.oauth2provider.registry.{ IntegrationTestRegistry, TestRegistry }
import org.scalatest._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
/**
 * Base class to be extended by all integration tests.
 */
class BaseIntegrationSpec extends WordSpec with MustMatchers
with GeneratorDrivenPropertyChecks
with IntegrationTestRegistry
with BeforeAndAfter
with OptionValues
with Inside
with Inspectors
with BaseMethods
with DefaultTestVariables
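// Hypothetical usage sketch (illustrative only; suite and subject names are made up):
//
//   class TokenEndpointSpec extends BaseIntegrationSpec {
//     "the token endpoint" must {
//       "reject a request from an unknown client" in {
//         assert(true) // real checks would use the mixed-in registry and base methods
//       }
//     }
//   }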
| yetu/oauth2-provider | test/com/yetu/oauth2provider/base/BaseIntegrationSpec.scala | Scala | mit | 502 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.kafka010
import java.{util => ju}
import javax.security.auth.login.{AppConfigurationEntry, Configuration}
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.hadoop.security.token.Token
import org.mockito.Mockito.mock
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.{SparkConf, SparkEnv, SparkFunSuite}
import org.apache.spark.kafka010.KafkaTokenUtil.KafkaDelegationTokenIdentifier
/**
 * A trait providing shared fixtures and helpers for Kafka delegation token related test suites.
 */
trait KafkaDelegationTokenTest extends BeforeAndAfterEach {
self: SparkFunSuite =>
// Wrap Mockito's doReturn with an explicit empty varargs argument to pin the
// overload, avoiding the ambiguity Scala otherwise reports between the two
// doReturn signatures.
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
protected val tokenId = "tokenId" + ju.UUID.randomUUID().toString
protected val tokenPassword = "tokenPassword" + ju.UUID.randomUUID().toString
private class KafkaJaasConfiguration extends Configuration {
val entry =
new AppConfigurationEntry(
"DummyModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
ju.Collections.emptyMap[String, Object]()
)
override def getAppConfigurationEntry(name: String): Array[AppConfigurationEntry] = {
if (name.equals("KafkaClient")) {
Array(entry)
} else {
null
}
}
}
override def afterEach(): Unit = {
try {
Configuration.setConfiguration(null)
UserGroupInformation.setLoginUser(null)
SparkEnv.set(null)
} finally {
super.afterEach()
}
}
protected def setGlobalKafkaClientConfig(): Unit = {
Configuration.setConfiguration(new KafkaJaasConfiguration)
}
protected def addTokenToUGI(): Unit = {
val token = new Token[KafkaDelegationTokenIdentifier](
tokenId.getBytes,
tokenPassword.getBytes,
KafkaTokenUtil.TOKEN_KIND,
KafkaTokenUtil.TOKEN_SERVICE
)
val creds = new Credentials()
creds.addToken(KafkaTokenUtil.TOKEN_SERVICE, token)
UserGroupInformation.getCurrentUser.addCredentials(creds)
}
protected def setSparkEnv(settings: Traversable[(String, String)]): Unit = {
val conf = new SparkConf().setAll(settings)
val env = mock(classOf[SparkEnv])
doReturn(conf).when(env).conf
SparkEnv.set(env)
}
}
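// Hypothetical usage sketch (not part of the original file): a concrete suite mixes in
// the trait, seeds a token, and checks it is visible through the UGI. The lookup key
// KafkaTokenUtil.TOKEN_SERVICE is the same one addTokenToUGI stores the token under.
//
//   class ExampleTokenSuite extends SparkFunSuite with KafkaDelegationTokenTest {
//     test("token seeded in UGI is retrievable") {
//       addTokenToUGI()
//       val creds = UserGroupInformation.getCurrentUser.getCredentials
//       assert(creds.getToken(KafkaTokenUtil.TOKEN_SERVICE) != null)
//     }
//   }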
| Aegeaner/spark | external/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaDelegationTokenTest.scala | Scala | apache-2.0 | 3,098 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.batch.table
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.table.api.batch.table.CalcTest.{MyHashCode, TestCaseClass, WC, giveMeCaseClass}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.utils.TableTestBase
import org.apache.flink.table.utils.TableTestUtil._
import org.junit.Test
class CalcTest extends TableTestBase {
@Test
def testMultipleFlatteningsTable(): Unit = {
val util = batchTestUtil()
val table = util.addTable[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)
val result = table.select('a.flatten(), 'c, 'b.flatten())
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select",
"a._1 AS a$_1",
"a._2 AS a$_2",
"c",
"b._1 AS b$_1",
"b._2 AS b$_2"
)
)
util.verifyTable(result, expected)
}
@Test
def testNestedFlattening(): Unit = {
val util = batchTestUtil()
val table = util
.addTable[((((String, TestCaseClass), Boolean), String), String)]("MyTable", 'a, 'b)
val result = table.select('a.flatten(), 'b.flatten())
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select",
"a._1 AS a$_1",
"a._2 AS a$_2",
"b"
)
)
util.verifyTable(result, expected)
}
@Test
def testScalarFunctionAccess(): Unit = {
val util = batchTestUtil()
val table = util
.addTable[(String, Int)]("MyTable", 'a, 'b)
val result = table.select(
giveMeCaseClass().get("my"),
giveMeCaseClass().get("clazz"),
giveMeCaseClass().flatten())
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select",
"giveMeCaseClass$().my AS _c0",
"giveMeCaseClass$().clazz AS _c1",
"giveMeCaseClass$().my AS _c2",
"giveMeCaseClass$().clazz AS _c3"
)
)
util.verifyTable(result, expected)
}
// ----------------------------------------------------------------------------------------------
  // Tests for all the situations in which we can perform field projection, e.g. selecting
  // a few fields from a source with a large field count.
// ----------------------------------------------------------------------------------------------
@Test
def testSimpleSelect(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a, 'b)
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "b")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectAllFields(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable1 = sourceTable.select('*)
val resultTable2 = sourceTable.select('a, 'b, 'c, 'd)
val expected = batchTableNode(0)
util.verifyTable(resultTable1, expected)
util.verifyTable(resultTable2, expected)
}
@Test
def testSelectAggregation(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a.sum, 'b.max)
val expected = unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "b")
),
term("select", "SUM(a) AS TMP_0", "MAX(b) AS TMP_1")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFunction(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
util.tableEnv.registerFunction("hashCode", MyHashCode)
val resultTable = sourceTable.select("hashCode(c), b")
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "MyHashCode$(c) AS _c0", "b")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('a, 'c).select('a)
val expected = unaryNode(
"DataSetCalc",
unaryNode(
"DataSetDistinct",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "c")
),
term("distinct", "a", "c")
),
term("select", "a")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectAllFieldsFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('a, 'c).select('a, 'c)
val expected = unaryNode(
"DataSetDistinct",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "c")
),
term("distinct", "a", "c")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectAggregationFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('c).select('a.sum)
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "c")
),
term("groupBy", "c"),
term("select", "c", "SUM(a) AS TMP_0")
),
term("select", "TMP_0")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFromGroupedTableWithNonTrivialKey(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('c.upperCase() as 'k).select('a.sum)
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetCalc",
batchTableNode(0),
// As stated in https://issues.apache.org/jira/browse/CALCITE-1584
// Calcite planner doesn't promise to retain field names.
term("select", "a", "c", "UPPER(c) AS $f2")
),
term("groupBy", "$f2"),
term("select", "$f2", "SUM(a) AS TMP_0")
),
term("select", "TMP_0")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFromGroupedTableWithFunctionKey(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy(MyHashCode('c) as 'k).select('a.sum)
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
unaryNode(
"DataSetCalc",
batchTableNode(0),
// As stated in https://issues.apache.org/jira/browse/CALCITE-1584
// Calcite planner doesn't promise to retain field names.
term("select", "a", "c", "MyHashCode$(c) AS $f2")
),
term("groupBy", "$f2"),
term("select", "$f2", "SUM(a) AS TMP_0")
),
term("select", "TMP_0")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFromAggregatedPojoTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[WC]("MyTable", 'word, 'frequency)
val resultTable = sourceTable
.groupBy('word)
.select('word, 'frequency.sum as 'frequency)
.filter('frequency === 2)
val expected =
unaryNode(
"DataSetCalc",
unaryNode(
"DataSetAggregate",
batchTableNode(0),
term("groupBy", "word"),
term("select", "word", "SUM(frequency) AS TMP_0")
),
term("select", "word, TMP_0 AS frequency"),
term("where", "=(TMP_0, 2)")
)
util.verifyTable(resultTable, expected)
}
@Test
def testMultiFilter(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a, 'b)
.filter('a > 0)
.filter('b < 2)
.filter(('a % 2) === 1)
val expected = unaryNode(
"DataSetCalc",
batchTableNode(0),
term("select", "a", "b"),
term("where", "AND(AND(>(a, 0), <(b, 2)), =(MOD(a, 2), 1))")
)
util.verifyTable(resultTable, expected)
}
}
object CalcTest {
case class TestCaseClass(my: String, clazz: Int)
object giveMeCaseClass extends ScalarFunction {
def eval(): TestCaseClass = {
TestCaseClass("hello", 42)
}
override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = {
createTypeInformation[TestCaseClass]
}
}
object MyHashCode extends ScalarFunction {
def eval(s: String): Int = s.hashCode()
}
case class WC(word: String, frequency: Long)
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/CalcTest.scala | Scala | apache-2.0 | 10,196 |
/*
 * Copyright KOLIBERO under one or more contributor license agreements.
 * KOLIBERO licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.template.fpm
import org.apache.predictionio.controller.PDataSource
import org.apache.predictionio.controller.EmptyEvaluationInfo
import org.apache.predictionio.controller.EmptyActualResult
import org.apache.predictionio.controller.Params
import org.apache.predictionio.data.store.PEventStore
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import grizzled.slf4j.Logger
case class DataSourceParams(appName: String) extends Params
class DataSource(val dsp: DataSourceParams)
extends PDataSource[TrainingData, EmptyEvaluationInfo, Query, EmptyActualResult] {
@transient lazy val logger = Logger[this.type]
override
def readTraining(sc: SparkContext): TrainingData = {
println("Gathering data from event server.")
val transactionsRDD: RDD[Array[String]] = PEventStore.find(
appName = dsp.appName,
entityType = Some("transaction"),
startTime = None,
eventNames = Some(List("$set")))(sc).map { event =>
try {
event.properties.get[Array[String]]("items")
} catch {
case e: Exception => {
logger.error(s"Failed to convert event ${event} of. Exception: ${e}.")
throw e
}
}
}
new TrainingData(transactionsRDD)
}
}
class TrainingData(
val transactions: RDD[Array[String]]
) extends Serializable
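// Hedged sketch: building TrainingData directly for a local test, bypassing the
// event server (SparkContext acquisition is elided with ???):
//   val sc: SparkContext = ???
//   val txns = sc.parallelize(Seq(Array("apple", "bread"), Array("bread", "beer")))
//   val td = new TrainingData(txns)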
| goliasz/pio-template-fpm | src/main/scala/DataSource.scala | Scala | apache-2.0 | 2,052 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import com.twitter.scalding._
import java.util.TimeZone
import scala.util.{ Try, Success, Failure }
case class GlobifierOps(implicit tz: TimeZone, dp: DateParser) {
val yearMonthDayHourDurations = List(Years(1), Months(1), Days(1), Hours(1))
val yearMonthDayHourPattern = "/%1$tY/%1$tm/%1$td/%1$tH"
private val hourlyGlobifier = Globifier(yearMonthDayHourPattern)
def normalizeHrDr(a: DateRange) =
DateRange(Hours(1).floorOf(a.start), Hours(1).floorOf(a.end))
def hourlyRtGlobifier(inputDR: DateRange): DateRange =
rtGlobifier(hourlyGlobifier, yearMonthDayHourDurations)(inputDR)
val yearMonthDayDurations = List(Years(1), Months(1), Days(1))
val yearMonthDayPattern = "/%1$tY/%1$tm/%1$td"
private val dailyGlobifier = Globifier(yearMonthDayPattern)
def normalizeDayDr(a: DateRange) =
DateRange(Days(1).floorOf(a.start), Days(1).floorOf(a.end))
def dailyRtGlobifier(inputDR: DateRange): DateRange =
rtGlobifier(dailyGlobifier, yearMonthDayDurations)(inputDR)
def rtGlobifier(globifier: Globifier, durationList: List[Duration])(inputDr: DateRange): DateRange = {
val p = globifier.globify(inputDr)
val drList = p.map { pattern =>
val (lists, _, _) = pattern.split("/").tail.foldLeft((List[(Duration, Duration)](), durationList, true)) {
case ((durationLists, mappings, shouldContinue), current) =>
val curMapping = mappings.head
if (shouldContinue) {
val tryDuration: Try[Duration] = Try(current.toInt).map { indx =>
curMapping match {
case t if mappings.tail == Nil => t
case _ => Millisecs(0)
}
}
val (duration, doContinue) = tryDuration match {
case Success(d) => (d, true)
case Failure(e) =>
val dur: Duration = curMapping match {
case Years(_) => sys.error("Current is " + current + ", parsed as all years?")
case Months(_) => Years(1)
case Days(_) => Months(1)
case Hours(_) => Days(1)
}
(dur, false)
}
val base: Duration = Try(current.toInt).map { indx =>
curMapping match {
case Years(_) => Years(indx - 1970)
case Months(_) => Months(indx - 1) // months and days are 1 offsets not 0
case Days(_) => Days(indx - 1)
case Hours(_) => Hours(indx)
}
}.getOrElse(Hours(0))
(durationLists :+ (base, duration), mappings.tail, doContinue)
} else {
(durationLists, mappings.tail, false)
}
}
val baseDate = lists.foldLeft(RichDate("1970-01-01T00")) {
case (curDate, (base, _)) =>
base.addTo(curDate)
}
val endDate = lists.foldLeft(baseDate) {
case (curDate, (_, dur)) =>
dur.addTo(curDate)
}
DateRange(baseDate, endDate - Millisecs(1))
}.sortBy(_.start)
def combineDR(existing: DateRange, next: DateRange): DateRange = {
      require(existing.end == next.start - Millisecs(1), "Not contiguous range: \n" + existing + "\n" + next + "...From:\n" + p.mkString(",\n"))
DateRange(existing.start, next.end)
}
drList.reduceLeft(combineDR)
}
}
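// Hedged round-trip sketch (dates illustrative; assumes DateParser.default exists
// in this scalding version):
//   implicit val tz: TimeZone = TimeZone.getTimeZone("UTC")
//   implicit val dp: DateParser = DateParser.default
//   val ops = GlobifierOps()
//   val dr = DateRange(RichDate("2012-01-01T00"), RichDate("2012-01-02T13"))
//   val back = ops.hourlyRtGlobifier(dr) // contiguous range covering dr's hours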
| oeddyo/scalding | scalding-date/src/test/scala/com/twitter/scalding/GlobifierOps.scala | Scala | apache-2.0 | 3,924 |
/* Copyright 2010 SpendChart.no
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.spendchart.banksync
import scala.swing._
import scala.swing.event.{ WindowClosing, MouseClicked, ButtonClicked, MousePressed, MouseEntered, MouseExited }
import javax.swing.{ UIManager, WindowConstants, JLabel }
import java.awt.{ SystemTray, MenuItem, TrayIcon, PopupMenu, Toolkit, Font, Color, Cursor }
import java.awt.event.{ ActionListener, ActionEvent, MouseListener, MouseEvent }
import java.net.URL
import scala.actors.Actor._
import actors.Actor
import org.scala_tools.time.Imports._
import no.spendchart.banksync.ui.{ ErrorMessage, OkMessage, Heading }
package msg {
case object Shutdown
case class StartSync(bankSyncPlugin: SkandiabankenSync, sync: Seq[(BankAccount, String)])
case class Sync(account: BankAccount, period: String)
case class Wait(msg: String)
case class Notice(msg: String)
case class MainMenu(msg: Option[Label] = None)
case class Login(msg: List[String] = Nil)
case class SkLogin(s: SkandiabankenSync, msg: List[String] = Nil)
case class InputSMSCode(s: SkandiabankenSync, msg: Option[String] = None)
case class ChoseAccounts(s: SkandiabankenSync, accounts: Seq[BankAccount], sync: Seq[(BankAccount, String)])
case class ChosePeriods(s: SkandiabankenSync, accounts: Seq[BankAccount], sync: Seq[(BankAccount, String)])
case class SpendChartLogin(username: String, password: Array[Char])
}
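// Hedged sketch of the intended message flow between the two actors defined
// below (see Banksync.act and SyncActor.act; credentials are illustrative):
//   SyncActor ! msg.SpendChartLogin("user", "secret".toCharArray)
//   // on success SyncActor replies:   Banksync ! msg.MainMenu()
//   // on bad credentials it replies:  Banksync ! msg.Login(List("..."))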
package object implicits {
implicit def tupleDimension(xy: Tuple2[Int, Int]) = new java.awt.Dimension(xy._1, xy._2)
implicit def tuplePoint(xy: Tuple2[Int, Int]) = new java.awt.Point(xy._1, xy._2)
}
package object util {
def getClassPathResource(filename: String) = Thread.currentThread.getContextClassLoader().getResource(filename)
def getImage(filename: String) = Swing.Icon(getClassPathResource(filename)).getImage
}
object RunMode extends Enumeration {
type RunMode = Value
val Test, Production, TestServer, TestBank = Value
}
object Banksync extends Application with Actor {
import implicits._
import util._
val applicationName = "SpendChart.no Banksync"
val runMode = Option(System.getProperty("runMode")).map(x => RunMode.valueOf(x)).flatMap(x => x).getOrElse(RunMode.Production)
val tray = false
val showAtStartup = true
val s = runMode match {
case RunMode.Production | RunMode.TestServer => new SkandiabankenSyncImpl
case RunMode.Test | RunMode.TestBank => new SkandiabankenSyncTest
}
def act = {
loop {
react {
case msg.Sync(account, period) => setView(ui.Wait("Synkroniserer " + period + " for " + account.number))
case msg.Wait(msg) => setView(ui.Wait(msg))
case msg.Notice(msg) => setView(ui.Wait(msg))
case msg.MainMenu(msg) => setView(ui.MainMenu(msg))
case msg.Login(msg) => setView(ui.SpendChartLogin(msg))
case msg.SkLogin(s, messages) => setView(skandiabanken.ui.Login(s, messages))
case msg.InputSMSCode(s, message) => setView(skandiabanken.ui.VerifySms(s, message))
case msg.ChoseAccounts(s, newOnes, oldOnes) => setView(skandiabanken.ui.ChoseAccounts(s, newOnes, oldOnes))
case msg.ChosePeriods(s, newOnes, oldOnes) => setView(skandiabanken.ui.ChosePeriods(s, newOnes, oldOnes))
case msg.Shutdown => exit()
}
}
}
this.start
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName())
val frame = new Frame {
val (width, height) = (600, 200)
size = (width, height)
preferredSize = size
title = applicationName
iconImage = getImage("coins.gif")
reactions += {
case WindowClosing(e) =>
this.visible = false
if (!tray) {
SyncActor ! msg.Shutdown
Banksync ! msg.Shutdown
System.exit(0)
}
}
peer.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE)
val gc = this.peer.getGraphicsConfiguration
val screensize = Toolkit.getDefaultToolkit.getScreenSize
val insets = Toolkit.getDefaultToolkit.getScreenInsets(gc)
putAtBottomRight()
def putAtBottomRight() {
val x = screensize.width - width - insets.right
val y = screensize.height - height - insets.bottom
location = (x, y)
}
}
def setView(panel: Panel) {
frame.contents = panel
panel match {
case f: ExtendedPanel =>
f.onFocus()
        f.defaultButton.foreach(frame.defaultButton = _)
case _ =>
}
}
setView(ui.SpendChartLogin())
if (tray) {
val tray = SystemTray.getSystemTray()
val popup = new PopupMenu()
val syncItem = new MenuItem("Synk")
val exitItem = new MenuItem("Avslutt")
val trayIcon = new TrayIcon(getImage("coins.gif"))
trayIcon.setImageAutoSize(true)
trayIcon.setToolTip(applicationName)
popup.add(syncItem)
popup.add(exitItem)
val trayIconMouseListener = new MouseListener() {
def mouseEntered(e: MouseEvent) {}
def mouseExited(e: MouseEvent) {}
def mouseReleased(e: MouseEvent) {}
def mousePressed(e: MouseEvent) {}
def mouseClicked(e: MouseEvent) {
frame.visible = true
}
}
trayIcon.addMouseListener(trayIconMouseListener)
trayIcon.setPopupMenu(popup)
tray.add(trayIcon)
exitItem.addActionListener(new ActionListener() {
def actionPerformed(e: ActionEvent) {
tray.remove(trayIcon)
SyncActor ! msg.Shutdown
Banksync ! msg.Shutdown
System.exit(0)
}
})
syncItem.addActionListener(new ActionListener() {
def actionPerformed(e: ActionEvent) {
frame.visible = true
}
})
} else {
frame.centerOnScreen
}
if (showAtStartup) {
frame.visible = true
}
SyncActor.start
}
case class CreateAccount(accounts: Seq[BankAccount], syncFrom: Option[String] = None)
object SyncActor extends Actor {
import no.spendchart.banksync.api.{ SpendChartApi, CheckAccountsReturn }
import net.liftweb.common._
val api = new SpendChartApi(Banksync.runMode)
var done = false
def act = {
while (!done) {
receive {
case msg.SpendChartLogin(user, password) =>
api.login(user, ("" /: password)(_ + _)) match {
case Failure("Failed", _, _) =>
Banksync ! msg.Login(List("Feil brukernavn eller passord. Vennligst prøv igjen."))
case Failure("Captcha required", _, _) =>
Banksync ! msg.Login(List("For mange mislykkede innloggingsforsøk. Du må logge inn på via https://www.spendchart.no."))
case Failure("Account Blocked", _, _) =>
Banksync ! msg.Login(List("For mange mislykkede innloggingsforsøk. Din konto har blitt blokkert i en time."))
case _ =>
Banksync ! msg.MainMenu()
}
case CreateAccount(accounts, None) =>
accounts.foreach { account => api.createAccount("1", account.number, account.name, false, None) }
case CreateAccount(accounts, Some(syncFrom)) =>
accounts.foreach { account => api.createAccount("1", account.number, account.name, true, Some(syncFrom)) }
case Login(s, username, bankPassword) =>
s.initLogin(username, bankPassword) match {
case step1.Success =>
Banksync ! msg.InputSMSCode(s)
case step1.WrongPassword =>
Banksync ! msg.SkLogin(s, List("Feil passord eller personnummer."))
case step1.Errors(lst) =>
Banksync ! msg.SkLogin(s, lst)
case step1.Failure(message) =>
Banksync ! msg.SkLogin(s, List(message))
case step1.Unexpected(message) =>
Banksync ! msg.MainMenu(Some(ErrorMessage("En uventet situasjon har oppstått. Vennligst prøv igjen.")))
}
case SmsCode(s, code) =>
s.completeLogin(code) match {
case step2.LoginCompleted =>
s.getAccounts() match {
case Some(accounts) =>
api.checkAccounts(1, accounts.map(_.number.toLong)) match {
case Full(CheckAccountsReturn(Nil, sync, noSync)) if sync.isEmpty =>
Banksync ! msg.MainMenu(Some(ErrorMessage("Ingen kontoer å synkronisere, du kan endre instillinger på SpendChart.no")))
case Full(CheckAccountsReturn(Nil, sync, noSync)) =>
val syncs = for (acc: BankAccount <- accounts; accPer <- sync.get(acc.number)) yield (acc, accPer)
this ! msg.StartSync(s, syncs)
case Full(CheckAccountsReturn(newAcc, sync, noSync)) =>
val syncs = for (acc: BankAccount <- accounts; accPer <- sync.get(acc.number)) yield (acc, accPer)
val newOnes = accounts.filter(acc => newAcc.contains(acc.number))
Banksync ! msg.ChoseAccounts(s, newOnes, syncs)
case x =>
println("Got an unexpected state while fetching accounts from SpendChart server: " + x)
Banksync ! msg.MainMenu(Some(ErrorMessage("En uventet situasjon har oppstått. Vennligst prøv igjen.")))
}
case x =>
println("Got an unexpected state while fetching accounts from SpendChart server: " + x)
                  Banksync ! msg.MainMenu(Some(ErrorMessage("En uventet situasjon har oppstått. Vennligst prøv igjen.")))
}
case step2.TimeOut =>
Banksync ! msg.MainMenu(Some(ErrorMessage("Innloggingen tok for lang tid. Vennligst prøv igjen senere.")))
case step2.WrongCodeFromSMS =>
Banksync ! msg.InputSMSCode(s, Some("Oppgitt kode var ikke korrekt, vennligst prøv igjen."))
case step2.Unexpected(message: String) =>
Banksync ! msg.MainMenu(Some(ErrorMessage("En uventet situasjon har oppstått. Vennligst prøv igjen.")))
}
case msg.StartSync(s, toSync) =>
toSync.foreach {
case (account, period) =>
println("Syncing " + account.number + " from period: " + period)
val periods = s.getPeriods(account).reverse
(0 to s.getPeriodId(account, period)).foreach(p => {
Banksync ! msg.Sync(account, periods(p))
s.fetchReport(p, account) match {
case Some(report) =>
println(report.filename + " downloaded")
api.upload("1", account.number, periods(p), report.filename, report.inputStream, report.contentLength)
case None =>
println("None")
}
})
case x => println("Wow got " + x + " in toSync.foreach")
}
Banksync ! msg.MainMenu(Some(OkMessage("Synkronisering gjennomført.")))
case msg.Shutdown => exit()
case sparebank1.StartSparebank1(fnr) => (new sparebank1.Sparebank1).login(fnr)
case nordea.StartNordea(fnr) => (new nordea.Nordea).login(fnr)
}
}
}
}
| spendchart/banksync | src/main/scala/no/spendchart/banksync/Banksync.scala | Scala | apache-2.0 | 11,579 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.algebra.real
import cogx.cogmath.geometry.Shape
import cogx.utilities.Random
import PoorFloat._
/** A matrix.
* <p>
* In the following descriptions, "v" represents a vector, "s" a scalar, "m"
* a matrix, "b" a boolean, "i" and integer, "d" a double, "r" a range of
* integers (e.g. "1 to 5" or "3 until 9").
* <p>
* The update operations, such as "+=", update the elements of the matrix in
* place.
*
* {{{
* Constructors:
* Matrix(i, i)
* Matrix(i, i, Array[d])
* copy
*
* Dimensions:
* rows => i (number of rows in matrix)
* columns => i (number of columns in matrix)
* size => i (total # elements in matrix)
*
* Linear element access:
* apply(i) => s (access element linearly)
* update(i, s) (write element i with value s)
*
* 2D element access:
* apply(i, i) => s (access element by 2D coordinates)
* update(i, i, s) (write element (i, i) with value s)
*
* Submatrix multi-element reads:
* row(i) => m (extract a copy of a row)
* column(i) => m (extract a copy of a column)
* submatrix(r, r) => m (extract a subregion of a matrix)
* reshape(i, i) => m (reshape the matrix)
*
* Matrix operations:
* map(f: d => d) => m (map a Matrix by applying f to each element)
* mapSelf(f: d => d) (use f to map each element in place)
* reduce((d, d)=> d) => d (reduce a Matrix to a scalar)
* randomize (initialize to random values in [0, 1))
*
* Matrix / Scalar operations:
* m + s => m (add scalar to each element to produce a new matrix)
* m += s (add scalar to all elements of matrix in-place)
* m - s => m
* m -= s
* m * s => m
* m *= s
* m / s => m
* m /= s
 * -m => m (map each element to a new matrix by multiplying by -1)
* m := s (assign scalar to all elements of the matrix)
*
* Matrix / Matrix operations:
* m + m => m
* m += m
* m - m => m
* m -= m
* m :* m => m (element-wise multiplication)
* m :*= m (element-wise multiplication in-place)
* m :/ m => m (element-wise right division)
 * m :/= m (element-wise right division in-place)
* m :\ m => m (element-wise left division)
* m :\= m (element-wise left division in-place)
* m1 === m2 => b (m1 and m2 have same shape, identical elements)
* m1 !=== m2 => b
 * m1 ~== m2 => b (m1 and m2 have same shape, approximately equal elements)
 * m1 !~== m2 => b
* m1 := m2 (assign elements of m2 to m1)
*
* Matrix multiplication:
* m * m => m (matrix multiplication)
*
* Matrix decompositions:
* cholesky => m (Cholesky decomposition)
* eigen => (m, m) (eigenvector matrix, diag eigenvalue matrix)
* lu => (m, m) (L matrix, U matrix)
* qr => (m, m) (Q matrix, R matrix)
* svd => (m, m, m) (U matrix, S matrix, V matrix)
*
 * Miscellaneous utilities:
* abs => m (absolute value of each element)
* sgn => m (sign of each element: -1, 0, 1)
* rectify => m (clip negative elements to 0)
* normalizeRows (normalize rows using L2 norm)
* normalizeColumns (normalize columns using L2 norm)
* print (print a matrix for debugging)
*
* }}}
*
* @param rows The number of rows in the matrix.
* @param columns The number of columns in the matrix.
* @param data Storage for matrix elements; length must be equal to
* rows * columns.
*
* @author Greg Snider
*/
@SerialVersionUID(-4510500996822152778L)
class Matrix(val rows: Int, val columns: Int, private[cogx] val data: Array[Float])
extends Tensor
with Serializable
{
val size = rows * columns
/** Shape of the matrix. */
def shape = Shape(rows, columns)
/** Tensor accessor. */
def read(index: Int) = data(index)
/** Constructor that allocates memory for Matrix (most common). */
def this(rows: Int, columns: Int) =
this(rows, columns, new Array[Float](rows * columns))
/** Create a Matrix from a 2-D tensor. */
def this(tensor: Tensor) = this(tensor.shape(0), tensor.shape(1), {
require(tensor.shape.dimensions == 2)
require(tensor.length == tensor.shape.points)
val newData = new Array[Float](tensor.shape.points)
for (i <- 0 until tensor.length)
newData(i) = tensor.read(i)
newData
})
/** Make a copy of a Matrix. */
def copy: Matrix = {
val t = new Matrix(rows, columns, new Array[Float](size))
for (i <- 0 until size)
t(i) = this(i)
t
}
/** Linear element access */
def apply(index: Int) = data(index)
/** Modify an element given its linear position. */
def update(index: Int, value: Float) {data(index) = value}
/** 2D coordinates element access */
def apply(row: Int, column: Int) = data(row * columns + column)
/** Modify an element given its 2D coordinates. */
def update(row: Int, column: Int, value: Float) {
data(row * columns + column) = value
}
/** Synonym for submatrix.
*
* @param rows Range of rows to extract.
* @param columns Range of columns to extract.
* @return The specified submatrix.
*/
def apply(rows: Range, columns: Range): Matrix = {
submatrix(rows, columns)
}
/** Update a subset of a matrix.
*
* @param rowRange Range of rows to update.
* @param columnRange Range of columns to update.
* @param values The new values to be written in the submatrix specified
* by `rows` and `columns`. This must have the exact same shape
* as (rows, columns), or it must be a 1 x 1 matrix, in which case
* it is treated like a scalar.
*/
def update(rowRange: Range, columnRange: Range, values: Matrix) {
if (values.rows == 1 && values.columns == 1) {
val scalar = values(0, 0)
// We treat the values matrix like a scalar
for (row <- rowRange; col <- columnRange)
this(row, col) = scalar
} else {
// Not a scalar
      require(rowRange.size == values.rows && columnRange.size == values.columns)
var valueRow = 0
for (row <- rowRange) {
var valueColumn = 0
for (col <- columnRange) {
this(row, col) = values(valueRow, valueColumn)
valueColumn += 1
}
valueRow += 1
}
}
}
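  // Hedged usage sketch for the block update above (values illustrative):
  //   val m = new Matrix(4, 4)
  //   m(0 to 1, 0 to 1) = Matrix(Array(1f, 2f), Array(3f, 4f)) // 2 x 2 block write
  //   m(2 to 3, 0 to 3) = Matrix(Array(9f))                    // 1 x 1 acts as a scalar fill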
/** Write the row "row" with the data in "vector". */
def update(row: Int, vector: Vector) {
require(columns == vector.length)
val offset = row * columns
Array.copy(vector.data, 0, this.data, offset, vector.length)
}
// Submatrix multi-element reads:
def row(rowIndex: Int) = submatrix(rowIndex to rowIndex, 0 until columns)
def column(colIndex: Int) = submatrix(0 until rows, colIndex to colIndex)
def submatrix(rowRange: Range, columnRange: Range): Matrix = {
val sub = new Matrix(rowRange.length, columnRange.length)
for (row <- 0 until sub.rows; col <- 0 until sub.columns)
sub(row, col) = this(row + rowRange.start, col + columnRange.start)
sub
}
def reshape(newRows: Int, newColumns: Int): Matrix = {
require(newRows * newColumns == rows * columns)
new Matrix(newRows, newColumns, data.map(e => e))
}
/** Flatten the rows of this matrix to a vector. */
def toVector = new Vector(data)
/** View the matrix as a flat array of data. */
def asArray = data
// Matrix operations:
def map(f: Float => Float) = new Matrix(rows, columns, data.map(f))
def mapSelf(f: Float => Float) {
(0 until size).foreach(i => data(i) = f(data(i)))
}
def randomize = {
for (i <- 0 until size)
data(i) = Matrix.rand.nextFloat
this
}
def transpose: Matrix = {
val t = new Matrix(columns, rows)
for (row <- 0 until rows; col <- 0 until columns)
t(col, row) = this(row, col)
t
}
// Matrix / Scalar operations:
def +(scalar: Float) = map(_ + scalar)
def +=(scalar: Float) {mapSelf(_ + scalar)}
def -(scalar: Float) = map(_ - scalar)
def -=(scalar: Float) {mapSelf(_ - scalar)}
def *(scalar: Float) = map(_ * scalar)
def *=(scalar: Float) {mapSelf(_ * scalar)}
def /(scalar: Float) = map(_ / scalar)
def /=(scalar: Float) {mapSelf(_ / scalar)}
def ^(scalar: Float) = map( math.pow(_,scalar).toFloat )
def ^=(scalar: Float) {mapSelf( math.pow(_,scalar).toFloat )}
def unary_- = map(_ * -1)
def reciprocal = map(1f / _)
def :=(scalar: Float) {mapSelf(e => scalar)}
// Boolean comparisons
def >(v: Float) = map(x => if (x > v) 1f else 0f)
def >=(v: Float) = map(x => if (x >= v) 1f else 0f)
def <(v: Float) = map(x => if (x < v) 1f else 0f)
def <=(v: Float) = map(x => if (x <= v) 1f else 0f)
// Matrix / Matrix operations:
def +(that: Matrix) = this.combine(that, _ + _)
def +=(that: Matrix) {this.combineSelf(that, _ + _)}
def -(that: Matrix) = this.combine(that, _ - _)
def -=(that: Matrix) {this.combineSelf(that, _ - _)}
def :*(that: Matrix) = this.combine(that, _ * _)
def :*=(that: Matrix) {this.combineSelf(that, _ * _)}
def :/(that: Matrix) = this.combine(that, _ / _)
def :/=(that: Matrix) {this.combineSelf(that, _ / _)}
def :\(that: Matrix) = this.combine(that, (e1, e2) => e2 / e1)
def :\=(that: Matrix) {this.combineSelf(that, (e1, e2) => e2 / e1)}
def :^(that: Matrix) = this.combine(that, math.pow(_,_).toFloat )
def :^=(that: Matrix) {this.combineSelf(that, math.pow(_,_).toFloat )}
def max(that: Matrix) = this.combine(that, _ max _)
def min(that: Matrix) = this.combine(that, _ min _)
def ===(that: Matrix) = compare(that, _ == _)
def !===(that: Matrix) = !(this === that)
def ~==(that: Matrix) = compare(that, (x, y) => x ~== y)
def !~==(that: Matrix) = !(this ~== that)
def :=(that: Matrix) {this.combineSelf(that, (e1, e2) => e2)}
// Matrix multiplication:
def *(that: Matrix): Matrix = {
require(this.columns == that.rows,
"Matrix * incompatibility: " + this.shape + " * " + that.shape)
val product = new Matrix(this.rows, that.columns)
var thisIndex = 0
var thatIndex = 0
val thatStride = that.columns
for (r <- 0 until product.rows) {
for (c <- 0 until product.columns) {
thisIndex = r * this.columns
thatIndex = c
// Internally use Double to help preserve precision of result for large matrices
var dotProduct = 0.0
for (i <- 0 until this.columns) {
dotProduct += this(thisIndex).toDouble * that(thatIndex)
thisIndex += 1
thatIndex += thatStride
}
product(r, c) = dotProduct.toFloat
}
}
product
}
/** Dot product of "this" and "that" */
def dot(that: Matrix): Float = {
(data zip that.data).map(v => v._1 * v._2).reduceLeft(_ + _)
}
/** Reduce the matrix to a scalar. */
def reduce(f: (Float, Float) => Float): Float = {
data.reduceLeft(f)
}
/** Matrix multiplication. */
def *(that: Vector): Vector = {
require(this.columns == that.length)
val product = new Vector(this.rows)
var thisIndex = 0
for (r <- 0 until product.length) {
thisIndex = r * this.columns
var dotProduct = 0.0f
for (i <- 0 until this.columns) {
dotProduct += this(thisIndex) * that(i)
thisIndex += 1
}
product(r) = dotProduct
}
product
}
/** Get the data in the tensor, flattened to a linear array. */
protected[cogx] def getData = data
// Matrix decompositions:
def cholesky: Matrix =
Matrix(new Jama.CholeskyDecomposition(Matrix.toJamaMatrix(this)).getL)
def eigen: (Matrix, Matrix) = {
val decomp = new Jama.EigenvalueDecomposition(Matrix.toJamaMatrix(this))
(Matrix(decomp.getV), Matrix(decomp.getD))
}
def lu: (Matrix, Matrix) = {
val decomp = new Jama.LUDecomposition(Matrix.toJamaMatrix(this))
(Matrix(decomp.getL), Matrix(decomp.getU))
}
def qr: (Matrix, Matrix) = {
val decomp = new Jama.QRDecomposition(Matrix.toJamaMatrix(this))
(Matrix(decomp.getQ), Matrix(decomp.getR))
}
def svd: (Matrix, Matrix, Matrix) = {
val decomp = new Jama.SingularValueDecomposition(Matrix.toJamaMatrix(this))
val u = decomp.getU
val s = decomp.getS
val v = decomp.getV
(Matrix(u), Matrix(s), Matrix(v))
}
/** Attempt to separate this matrix into an outer product of two vectors.
* Relies on a tolerance, here taken to be the machine epsilon.
*
* @return Some((verticalVector, horizontalVector)) if separable, otherwise
* None.
*/
def separate: Option[(Vector, Vector)] = {
require(rows == columns)
val (u, s, v) = svd
//val tolerance = 1.0e-20 * rows * columns
val tolerance = epsilon
val nonzeroSingularValues =
s.map(math.abs(_)).map(e => if (e > tolerance) 1f else 0f).reduce(_ + _).toInt
if (nonzeroSingularValues == 1) {
val scale = scala.math.sqrt(s(0, 0)).toFloat
val verticalVector = u.transpose.row(0).toVector * scale
val horizontalVector = v.transpose.row(0).toVector * scale
Some((verticalVector, horizontalVector))
} else
None
}
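  // Hedged sketch: separable 2-D filters (e.g. a Gaussian) can be applied as two
  // cheap 1-D passes ('kernel1d' is a hypothetical 1-D profile, not defined here):
  //   val k = Matrix(5, 5, (r, c) => kernel1d(r) * kernel1d(c)) // rank-1 by construction
  //   k.separate match {
  //     case Some((vert, horiz)) => // convolve rows with horiz, then columns with vert
  //     case None                 => // not separable within tolerance; use the 2-D kernel
  //   }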
def invert: Matrix = {
Matrix(Matrix.toJamaMatrix(this).inverse)
}
def forwardGradient: (Matrix, Matrix) = {
val v0 = new Matrix(rows, columns)
val v1 = new Matrix(rows, columns)
for (row <- 0 until rows - 1) {
for (col <- 0 until columns - 1) {
v1(row, col) += this(row, col + 1) - this(row, col)
v0(row, col) += this(row + 1, col) - this(row, col)
}
}
(v0, v1)
}
def backwardDivergence(v1: Matrix): Matrix = {
val v0 = this
require(v1.shape == v0.shape)
val m = new Matrix(rows, columns)
for (row <- 0 until rows - 1) {
for (col <- 0 until columns - 1) {
// Differs from GEE book since they use (columns, rows), so must
// flip role of v0 and v1
m(row, col + 1) += v1(row, col)
m(row + 1, col) += v0(row, col)
m(row, col) -= v0(row, col) + v1(row, col)
}
}
-m
}
  // Miscellaneous utilities:
def abs = map(_.abs)
def sgn = map(e => {if (e > 0) 1.0f else if (e < 0) -1.0f else 0.0f})
def rectify = map(_ max 0)
def normalizeRows {
for (r <- 0 until rows) {
var sumOfSq = 0.0f
for (c <- 0 until columns)
sumOfSq += this(r, c) * this(r, c)
val l2Norm = math.sqrt(sumOfSq).toFloat
for (c <- 0 until columns)
this(r, c) /= l2Norm
}
}
def normalizeColumns {
for (c <- 0 until columns) {
var sumOfSq = 0.0f
for (r <- 0 until rows)
sumOfSq += this(r, c) * this(r, c)
val l2Norm = math.sqrt(sumOfSq).toFloat
for (r <- 0 until rows)
this(r, c) /= l2Norm
}
}
def print {
for (row <- 0 until rows) {
for (col <- 0 until columns)
printf(" %9.4f", this(row, col))
println
}
println
}
def compactString: String = {
val s = new StringBuilder("Matrix[%d, %d](".format(rows, columns))
for (row <- 0 until rows) {
s ++= "("
for (col <- 0 until columns){
if (col < columns-1)
s ++= "%.3f, ".format(this(row, col))
else
s ++= "%.3f".format(this(row, col))
}
if (row < rows-1)
s ++= "), "
else
s ++= ")"
}
s ++= ")"
s.toString
}
/** Combine "this" and "that" by using "f" on corresponding elements. */
private def combine(that: Matrix, f:(Float, Float) => Float): Matrix = {
require(this.shape == that.shape)
val result = new Array[Float](this.size)
for (i <- 0 until data.length)
result(i) = f(this(i), that(i))
new Matrix(rows, columns, result)
}
/** Compare "this" and "that" using "f" on corresponding elements. "This" and
* "that" must be the same shape and corresponding elements must all satisfy
* "f" to return true, otherwise this returns false.
*/
private def compare(that: Matrix, f: (Float, Float) => Boolean): Boolean = {
if (!(this.shape == that.shape))
return false
for (i <- 0 until data.length)
if (!f(this(i), that(i)))
return false
true
}
/** Combine "that" into "this" by using "f" on corresponding elements. */
private def combineSelf(that: Matrix, f:(Float, Float) => Float) {
require(this.shape == that.shape)
for (i <- 0 until data.length)
this(i) = f(this(i), that(i))
}
/** Get the size of the matrix as a string. */
def sizeString = "(" + rows + " x " + columns + ")"
/** Shift a matrix with zero fill on the edges.
*
* @param shiftRows Number of rows to shift `this` down.
* @param shiftColumns Number of columns to shift `this` to the right.
* @return Shifted matrix, zero-filling on edges.
*/
def shift(shiftRows: Int, shiftColumns: Int): Matrix = {
val shifted = new Matrix(this.rows, this.columns)
for (row <- 0 until this.rows; col <- 0 until this.columns) {
val sRow = row - shiftRows
val sCol = col - shiftColumns
val pixel =
if (sRow >= 0 && sRow < this.rows && sCol >= 0 && sCol < this.columns)
this(sRow, sCol)
else
0f
shifted(row, col) = pixel
}
shifted
}
/** Shifts the elements of a matrix cyclicly as though it were a torus, wrapping
* around the left side to the right side, and the top to the bottom. The
* rotation is specified by the tuple("deltaRows", "deltaColumns"). For
* example, the tuple value (2, 3) would cause the element at location (0, 0)
* to be moved to location (2, 3). In a 5 x 5 matrix, the same tuple would
* cause the element at (4, 4) to be moved to (1, 2), wrapping around the
* torus. Returns the cyclicly shifted matrix.
*/
def shiftCyclic(deltaRows: Int, deltaColumns: Int): Matrix = {
val result = new Matrix(rows, columns)
var rowShift = deltaRows
while (rowShift < 0)
rowShift += rows
var colShift = deltaColumns
while (colShift < 0)
colShift += columns
for (row <- 0 until rows; col <- 0 until columns)
result((row + rowShift) % rows, (col + colShift) % columns) =
this(row, col)
result
}
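  // Hedged sketch of the toroidal wrap described above, on a 3 x 3 matrix:
  //   val m = Matrix(3, 3, (r, c) => (r * 3 + c).toFloat)
  //   val s = m.shiftCyclic(1, 1) // (0,0) -> (1,1); (2,2) wraps around to (0,0)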
/** Shift "this" matrix down by "shift" rows and right by "shift" columns,
* expand to a "bigRows" x "bigColumns" complex matrix, padding with zeros
* everywhere an element is not defined by "this".
*/
def shiftAndExpand(shift: Int, bigRows: Int, bigColumns: Int): Matrix =
{
require(bigRows + shift >= rows && bigColumns + shift >= columns)
val big = new Matrix(bigRows, bigColumns)
for (row <- 0 until rows) {
val fromIndex = row * columns
val toIndex = (row + shift) * bigColumns + shift
Array.copy(data, fromIndex, big.data, toIndex, columns)
}
big
}
/** Expand the matrix, optionally extending the border into the expanded
* region. This operation is a key part of the FFT. The new matrix is of
* size "bigRows" x "bigColumns" and element (0, 0) of this is anchored at
* (0, 0) in the larger matrix. If "borderFill" is true, then the four
* edges of the matrix are extended evenly in all directions, as though
* the bigger matrix were actually a torus with opposite edges touching.
*/
def expand(bigRows: Int, bigColumns: Int, borderFill: Boolean): Matrix =
{
require(bigRows >= rows && bigColumns >= columns)
val big = new Matrix(bigRows, bigColumns)
// Copy "this" into big
for (row <- 0 until rows; col <- 0 until columns)
big(row, col) = apply(row, col)
// Now copy the border along the edges if requested
if (borderFill) {
val nearBottomApronSize = (bigRows - rows) / 2
val farBottomApronSize = (bigRows - rows) - nearBottomApronSize
val nearRightApronSize = (bigColumns - columns) / 2
val farRightApronSize = (bigColumns - columns) - nearRightApronSize
// far right edge
for (col <- big.columns - farRightApronSize until big.columns)
for (row <- 0 until rows)
big(row, col) = apply(row, 0)
// near right edge
for (col <- columns until (columns + nearRightApronSize))
for (row <- 0 until rows)
big(row, col) = apply(row, columns - 1)
// far bottom edge, copied from expanded matrix 'big' along entire row
for (row <- big.rows - farBottomApronSize until big.rows)
for (col <- 0 until bigColumns)
big(row, col) = big(0, col)
// near bottom edge, copied from expanded matrix 'big' along entire row
for (row <- rows until rows + nearBottomApronSize)
for (col <- 0 until bigColumns)
big(row, col) = big(rows - 1, col)
}
big
}
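  // Hedged sketch: pad a kernel out to an FFT-friendly size, with or without
  // border fill ('kernel' and 'image' and the sizes are illustrative):
  //   val padded  = kernel.expand(64, 64, borderFill = false) // zero fill
  //   val wrapped = image.expand(72, 72, borderFill = true)   // edges extended as on a torus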
/** Trim "this" to a "smallRows" x "smallColumns" matrix. */
def trim(smallRows: Int, smallColumns: Int): Matrix = {
require(smallRows <= rows && smallColumns <= columns)
val small = new Matrix(smallRows, smallColumns)
for (row <- 0 until smallRows; col <- 0 until smallColumns)
small(row, col) = apply(row, col)
small
}
/** Flip the matrix left-to-right and top-to-bottom. This is useful for
* creating correlation matrices that are implemented using convolution.
*/
def flip: Matrix = {
val result = new Matrix(rows, columns)
for (row <- 0 until rows; col <- 0 until columns)
result(rows - row - 1, columns - col - 1) = this(row, col)
result
}
/** Compute the Moore-Penrose pseudo inverse. */
def pseudoInverse: Matrix = {
// There's a bug in the Jama library for SVD where it sometimes fails on
    // matrices with fewer rows than columns. In that case we transpose the
// matrix first, pseudo-invert it, and transpose again. There are no
// plans to fix Jama, so we hack...
val flip = rows < columns
val matrix = if (flip) this.transpose else this
val (u, s, v) = matrix.svd
val tolerance = epsilon * (rows max columns) * s.abs.reduce(_ max _)
// Take the pseudo-inverse of s.
val inverseS = s.transpose
val diagonalSize = inverseS.rows min inverseS.columns
for (d <- 0 until diagonalSize)
if (inverseS(d, d) > tolerance)
inverseS(d, d) = 1f / inverseS(d, d)
val inverse: Matrix = v * inverseS * u.transpose
if (flip)
inverse.transpose
else
inverse
}
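  // Hedged sketch: least-squares solve of an overdetermined system a * x = b
  // ('a' and 'b' are illustrative):
  //   val x: Vector = a.pseudoInverse * b // minimizes the L2 norm of (a * x - b)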
/** The distance from 1.0f to the next larger float. */
private val epsilon: Float = math.ulp(1.0f)
/** Find the rank of a matrix.
*
* @return Rank of the matrix.
*/
def rank: Int = {
val (u, s, v) = this.svd
val tolerance = epsilon * (rows max columns) * s.abs.reduce(_ max _)
val rank = s.map(x => if (x > tolerance) 1f else 0f).reduce(_ + _)
rank.toInt
}
/** Convolve this matrix with another. Useful for convolving two filters.
* Works only with square, odd-sized filters.
*
* @param that The matrix to be convolved with this.
* @return The convolution of the filters, larger than either.
*/
def convolve(that: Matrix): Matrix = {
require(this.rows == this.columns, "matrix must be square")
require(that.rows == that.columns, "matrix must be square")
require(this.rows % 2 == 1, "matrix must be of odd size")
require(that.rows % 2 == 1, "matrix must be of odd size")
require(that.rows <= this.rows, "that matrix cannot be larger than this")
// Flip "that" for convolution. "That" is regarded as the filter.
val thatFlipped = new Matrix(that.rows, that.columns)
for (r <- 0 until that.rows; c <- 0 until that.columns)
thatFlipped(r, c) = that(that.rows - r - 1, that.columns - c - 1)
// Expand "this", border filled with zeroes, to make computation easier.
val expandedHalo = thatFlipped.rows - 1
val expandedSize = this.rows + 2 * expandedHalo
val resultHalo = thatFlipped.rows / 2
val resultSize = this.rows + 2 * resultHalo
val expanded = new Matrix(expandedSize, expandedSize)
for (r <- 0 until this.rows; c <- 0 until this.columns)
expanded(r + expandedHalo, c + expandedHalo) = this(r, c)
// Do the convolution.
val result = new Matrix(resultSize, resultSize)
for (r <- 0 until resultSize; c <- 0 until resultSize) {
var sum = 0f
for (thatRow <- 0 until that.rows; thatCol <- 0 until that.columns) {
sum += thatFlipped(thatRow, thatCol) * expanded(r + thatRow, c + thatCol)
}
result(r, c) = sum
}
result
}
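  // Hedged sketch: composing two box blurs into one kernel (the result grows):
  //   val blur3 = Matrix(3, 3, (_, _) => 1f / 9f)
  //   val blur5 = blur3 convolve blur3 // 3 + 3 - 1 = 5, so a 5 x 5 kernel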
}
/**
* Factory methods for Matrix.
*/
object Matrix {
private lazy val rand = new Random
/** Create a Matrix from an array of row data. */
def apply(rowArray: Array[Float]*): Matrix = Matrix.apply(rowArray.toArray)
/** Create a Matrix from a 2D array. */
def apply(rowArray: Array[Array[Float]]): Matrix = {
val rows = rowArray.length
val columns = rowArray(0).length
for (row <- rowArray)
require(row.length == columns)
val data = new Array[Float](columns * rows)
var index = 0
for (row <- 0 until rows) {
for (col <- 0 until columns) {
data(index) = rowArray(row)(col)
index += 1
}
}
new Matrix(rows, columns, data)
}
/** Create a "rows" x "columns" matrix initialized by "f". */
def apply(rows: Int, columns: Int, f: (Int, Int) => Float): Matrix = {
val matrix = new Matrix(rows, columns)
for (row <- 0 until rows; col <- 0 until columns)
matrix(row, col) = f(row, col)
matrix
}
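  // Hedged sketch (illustrative): a horizontal ramp via the functional factory:
  //   val ramp = Matrix(2, 3, (r, c) => c.toFloat) // both rows are (0, 1, 2)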
/** Create a square diagonal matrix with specified "diagonal" values. */
def diagonal(vector: Vector): Matrix = {
new Matrix(vector.length, vector.length) {
for (row <- 0 until rows)
this(row, row) = vector(row)
}
}
/** Create a "rows" x "rows" square identity matrix. */
def identity(rows: Int): Matrix = {
new Matrix(rows, rows) {
for (row <- 0 until rows)
this(row, row) = 1f
}
}
/** Create a random "rows" x "columns" matrix. */
def random(rows: Int, columns: Int): Matrix = {
val random = new Random
new Matrix(rows, columns) {
for (row <- 0 until rows; col <- 0 until columns)
this(row, col) = random.nextFloat
}
}
/** Create a Jama matrix containing the same values as "this". */
private[algebra] def toJamaMatrix(m: Matrix): Jama.Matrix = {
val array = new Array[Array[Double]](m.rows)
for (r <- 0 until m.rows)
array(r) = new Array[Double](m.columns)
var index = 0
for (r <- 0 until m.rows; c <- 0 until m.columns) {
array(r)(c) = m(index)
index += 1
}
val jamaMatrix = new Jama.Matrix(array)
require(jamaMatrix.getRowDimension == m.rows)
require(jamaMatrix.getColumnDimension == m.columns)
require(jamaMatrix.getArray.length == m.rows)
require(jamaMatrix.getArray()(0).length == m.columns)
jamaMatrix
}
/** Create a Matrix containing the same values as a Jama matrix. */
private[algebra] def apply(j: Jama.Matrix): Matrix = {
// There appears to be a bug in the Jama library, making the
// getRowDimension and getColumnDimension unreliable. So we look at
// the actual stored arrays to get that information.
//require(j.getArray.length == j.getRowDimension)
//require(j.getArray()(0).length == j.getColumnDimension)
val jamaArray = j.getArray
val rows = jamaArray.length
val columns = jamaArray(0).length
val array = new Array[Float](rows * columns)
var index = 0
for (r <- 0 until rows; c <- 0 until columns) {
array(index) = jamaArray(r)(c).toFloat
index += 1
}
new Matrix(rows, columns, array)
}
}
| hpe-cct/cct-core | src/main/scala/cogx/cogmath/algebra/real/Matrix.scala | Scala | apache-2.0 | 28,473 |
package com.bradbrok.filmomatic
object Message {
trait Request
trait Response {
def request: Request
}
}
| bradbrok/Film-O-Matic | core/src/main/scala/com/bradbrok/filmomatic/Message.scala | Scala | mit | 116 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.api.dag.Transformation
import org.apache.flink.runtime.operators.DamBehavior
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, CorrelateCodeGenerator}
import org.apache.flink.table.planner.delegation.BatchPlanner
import org.apache.flink.table.planner.functions.utils.TableSqlFunction
import org.apache.flink.table.planner.plan.`trait`.{FlinkRelDistribution, FlinkRelDistributionTraitDef, TraitUtil}
import org.apache.flink.table.planner.plan.nodes.exec.{BatchExecNode, ExecNode}
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableFunctionScan
import org.apache.flink.table.planner.plan.utils.RelExplainUtil
import org.apache.calcite.plan.{RelOptCluster, RelOptRule, RelTraitSet}
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.{Correlate, JoinRelType}
import org.apache.calcite.rel.{RelCollationTraitDef, RelDistribution, RelFieldCollation, RelNode, RelWriter, SingleRel}
import org.apache.calcite.rex.{RexCall, RexInputRef, RexNode, RexProgram}
import org.apache.calcite.sql.SqlKind
import org.apache.calcite.util.mapping.{Mapping, MappingType, Mappings}
import java.util
import scala.collection.JavaConversions._
/**
* Batch physical RelNode for [[Correlate]] (user defined table function).
*/
class BatchExecCorrelate(
cluster: RelOptCluster,
traitSet: RelTraitSet,
inputRel: RelNode,
scan: FlinkLogicalTableFunctionScan,
condition: Option[RexNode],
projectProgram: Option[RexProgram],
outputRowType: RelDataType,
joinType: JoinRelType)
extends SingleRel(cluster, traitSet, inputRel)
with BatchPhysicalRel
with BatchExecNode[BaseRow] {
require(joinType == JoinRelType.INNER || joinType == JoinRelType.LEFT)
override def deriveRowType(): RelDataType = outputRowType
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
copy(traitSet, inputs.get(0), projectProgram, outputRowType)
}
/**
   * Note: do not pass in member 'child', because singleRel.replaceInput may update the 'input' rel.
*/
def copy(
traitSet: RelTraitSet,
child: RelNode,
projectProgram: Option[RexProgram],
outputType: RelDataType): RelNode = {
new BatchExecCorrelate(
cluster,
traitSet,
child,
scan,
condition,
projectProgram,
outputType,
joinType)
}
override def explainTerms(pw: RelWriter): RelWriter = {
val rexCall = scan.getCall.asInstanceOf[RexCall]
val sqlFunction = rexCall.getOperator.asInstanceOf[TableSqlFunction]
super.explainTerms(pw)
.item("invocation", scan.getCall)
.item("correlate", RelExplainUtil.correlateToString(
input.getRowType, rexCall, sqlFunction, getExpressionString))
.item("select", outputRowType.getFieldNames.mkString(","))
.item("rowType", outputRowType)
.item("joinType", joinType)
.itemIf("condition", condition.orNull, condition.isDefined)
}
override def satisfyTraits(requiredTraitSet: RelTraitSet): Option[RelNode] = {
val requiredDistribution = requiredTraitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE)
// Correlate could not provide broadcast distribution
if (requiredDistribution.getType == RelDistribution.Type.BROADCAST_DISTRIBUTED) {
return None
}
def getOutputInputMapping: Mapping = {
val inputFieldCnt = getInput.getRowType.getFieldCount
projectProgram match {
case Some(program) =>
val projects = program.getProjectList.map(program.expandLocalRef)
val mapping = Mappings.create(MappingType.INVERSE_FUNCTION, inputFieldCnt, projects.size)
projects.zipWithIndex.foreach {
case (project, index) =>
project match {
case inputRef: RexInputRef => mapping.set(inputRef.getIndex, index)
case call: RexCall if call.getKind == SqlKind.AS =>
call.getOperands.head match {
case inputRef: RexInputRef => mapping.set(inputRef.getIndex, index)
case _ => // ignore
}
case _ => // ignore
}
}
mapping.inverse()
case _ =>
val mapping = Mappings.create(MappingType.FUNCTION, inputFieldCnt, inputFieldCnt)
(0 until inputFieldCnt).foreach {
index => mapping.set(index, index)
}
mapping
}
}
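    // Hedged illustration: with a project list like [$1, AS($0, "a")], the inverse
    // mapping sends output field 0 -> input field 1 and output field 1 -> input
    // field 0, which is what lets the required distribution/collation below be
    // pushed down onto the correlate's input.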
val mapping = getOutputInputMapping
val appliedDistribution = requiredDistribution.apply(mapping)
// If both distribution and collation can be satisfied, satisfy both. If only distribution
// can be satisfied, only satisfy distribution. There is no possibility to only satisfy
// collation here except for there is no distribution requirement.
if ((!requiredDistribution.isTop) && (appliedDistribution eq FlinkRelDistribution.ANY)) {
return None
}
val requiredCollation = requiredTraitSet.getTrait(RelCollationTraitDef.INSTANCE)
val appliedCollation = TraitUtil.apply(requiredCollation, mapping)
// the required collation can be satisfied if field collations are not empty
// and the direction of each field collation is non-STRICTLY
val canSatisfyCollation = appliedCollation.getFieldCollations.nonEmpty &&
!appliedCollation.getFieldCollations.exists { c =>
(c.getDirection eq RelFieldCollation.Direction.STRICTLY_ASCENDING) ||
(c.getDirection eq RelFieldCollation.Direction.STRICTLY_DESCENDING)
}
// If required traits only contains collation requirements, but collation keys are not columns
// from input, then no need to satisfy required traits.
if ((appliedDistribution eq FlinkRelDistribution.ANY) && !canSatisfyCollation) {
return None
}
var inputRequiredTraits = getInput.getTraitSet
var providedTraits = getTraitSet
if (!appliedDistribution.isTop) {
inputRequiredTraits = inputRequiredTraits.replace(appliedDistribution)
providedTraits = providedTraits.replace(requiredDistribution)
}
if (canSatisfyCollation) {
inputRequiredTraits = inputRequiredTraits.replace(appliedCollation)
providedTraits = providedTraits.replace(requiredCollation)
}
val newInput = RelOptRule.convert(getInput, inputRequiredTraits)
Some(copy(providedTraits, Seq(newInput)))
}
//~ ExecNode methods -----------------------------------------------------------
override def getDamBehavior: DamBehavior = DamBehavior.PIPELINED
override def getInputNodes: util.List[ExecNode[BatchPlanner, _]] =
getInputs.map(_.asInstanceOf[ExecNode[BatchPlanner, _]])
override def replaceInputNode(
ordinalInParent: Int,
newInputNode: ExecNode[BatchPlanner, _]): Unit = {
replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
}
override protected def translateToPlanInternal(
planner: BatchPlanner): Transformation[BaseRow] = {
val config = planner.getTableConfig
val inputTransformation = getInputNodes.get(0).translateToPlan(planner)
.asInstanceOf[Transformation[BaseRow]]
val operatorCtx = CodeGeneratorContext(config)
val transformation = CorrelateCodeGenerator.generateCorrelateTransformation(
config,
operatorCtx,
inputTransformation,
input.getRowType,
projectProgram,
scan,
condition,
outputRowType,
joinType,
inputTransformation.getParallelism,
retainHeader = false,
getExpressionString,
"BatchExecCorrelate")
transformation.setName(getRelDetailedDescription)
transformation
}
}
| fhueske/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchExecCorrelate.scala | Scala | apache-2.0 | 8,571 |
package lila.app
package templating
import controllers.routes
import mashup._
import play.twirl.api.Html
import lila.api.Context
import lila.common.LightUser
import lila.rating.{ PerfType, Perf }
import lila.user.{ User, UserContext, Perfs }
trait UserHelper { self: I18nHelper with StringHelper with NumberHelper =>
def showProgress(progress: Int, withTitle: Boolean = true) = Html {
val span = progress match {
case 0 => ""
case p if p > 0 => s"""<span class="positive" data-icon="N">$p</span>"""
case p if p < 0 => s"""<span class="negative" data-icon="M">${math.abs(p)}</span>"""
}
val title = if (withTitle) """data-hint="Rating progression over the last twelve games"""" else ""
val klass = if (withTitle) "progress hint--bottom" else "progress"
s"""<span $title class="$klass">$span</span>"""
}
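  // Hedged sketch of the markup produced above (hint attribute abbreviated):
  //   showProgress(12) // => <span data-hint="..." class="progress hint--bottom">
  //                    //      <span class="positive" data-icon="N">12</span></span>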
val topBarSortedPerfTypes: List[PerfType] = List(
PerfType.Bullet,
PerfType.Chess960,
PerfType.Blitz,
PerfType.KingOfTheHill,
PerfType.Classical,
PerfType.ThreeCheck,
PerfType.Correspondence,
PerfType.Antichess,
PerfType.Atomic,
PerfType.Horde,
PerfType.RacingKings,
PerfType.Crazyhouse)
private def best4Of(u: User, perfTypes: List[PerfType]) =
perfTypes.sortBy { pt => -u.perfs(pt).nb } take 4
def miniViewSortedPerfTypes(u: User): List[PerfType] =
best4Of(u, List(PerfType.Bullet, PerfType.Blitz, PerfType.Classical, PerfType.Correspondence)) :::
best4Of(u, List(PerfType.Crazyhouse, PerfType.Chess960, PerfType.KingOfTheHill, PerfType.ThreeCheck, PerfType.Antichess, PerfType.Atomic, PerfType.Horde, PerfType.RacingKings))
def showPerfRating(rating: Int, name: String, nb: Int, provisional: Boolean, icon: Char, klass: String)(implicit ctx: Context) = Html {
val title = s"$name rating over ${nb.localize} games"
val attr = if (klass == "title") "title" else "data-hint"
val number = if (nb > 0) s"$rating${if (provisional) "?" else ""}"
else " -"
s"""<span $attr="$title" class="$klass"><span data-icon="$icon">$number</span></span>"""
}
def showPerfRating(perfType: PerfType, perf: Perf, klass: String)(implicit ctx: Context): Html =
showPerfRating(perf.intRating, perfType.name, perf.nb, perf.provisional, perfType.iconChar, klass)
def showPerfRating(u: User, perfType: PerfType, klass: String = "hint--bottom")(implicit ctx: Context): Html =
showPerfRating(perfType, u perfs perfType, klass)
def showPerfRating(u: User, perfKey: String)(implicit ctx: Context): Option[Html] =
PerfType(perfKey) map { showPerfRating(u, _) }
def showBestPerf(u: User)(implicit ctx: Context): Option[Html] = u.perfs.bestPerf map {
case (pt, perf) => showPerfRating(pt, perf, klass = "hint--bottom")
}
def showRatingDiff(diff: Int) = Html {
diff match {
case 0 => """<span class="rp null">±0</span>"""
case d if d > 0 => s"""<span class="rp up">+$d</span>"""
case d => s"""<span class="rp down">$d</span>"""
}
}
def lightUser(userId: String): Option[LightUser] = Env.user lightUser userId
def lightUser(userId: Option[String]): Option[LightUser] = userId flatMap lightUser
def usernameOrId(userId: String) = lightUser(userId).fold(userId)(_.titleName)
def usernameOrAnon(userId: Option[String]) = lightUser(userId).fold(User.anonymous)(_.titleName)
def isOnline(userId: String) = Env.user isOnline userId
def userIdLink(
userIdOption: Option[String],
cssClass: Option[String] = None,
withOnline: Boolean = true,
withTitle: Boolean = true,
truncate: Option[Int] = None,
params: String = ""): Html = Html {
userIdOption.flatMap(lightUser).fold(User.anonymous) { user =>
userIdNameLink(
userId = user.id,
username = user.name,
title = user.title,
cssClass = cssClass,
withOnline = withOnline,
withTitle = withTitle,
truncate = truncate,
params = params)
}
}
def lightUserLink(
user: LightUser,
cssClass: Option[String] = None,
withOnline: Boolean = true,
withTitle: Boolean = true,
truncate: Option[Int] = None,
params: String = ""): Html = Html {
userIdNameLink(
userId = user.id,
username = user.name,
title = user.title,
cssClass = cssClass,
withOnline = withOnline,
withTitle = withTitle,
truncate = truncate,
params = params)
}
def userIdLink(
userId: String,
cssClass: Option[String]): Html = userIdLink(userId.some, cssClass)
def userIdLinkMini(userId: String) = Html {
val user = lightUser(userId)
val name = user.fold(userId)(_.name)
val content = user.fold(userId)(_.titleNameHtml)
val klass = userClass(userId, none, false)
val href = userHref(name)
s"""<a data-icon="r" $klass $href> $content</a>"""
}
def usernameLink(
usernameOption: Option[String],
cssClass: Option[String] = None,
withOnline: Boolean = true,
withTitle: Boolean = true,
truncate: Option[Int] = None): Html = Html {
usernameOption.fold(User.anonymous) { username =>
userIdNameLink(username.toLowerCase, username, cssClass, withOnline, withTitle, truncate)
}
}
private def titleTag(title: Option[String]) = title match {
case None => ""
case Some(t) => s"""<span class="title" title="${User titleName t}">$t</span> """
}
private def userIdNameLink(
userId: String,
username: String,
cssClass: Option[String] = None,
withOnline: Boolean = true,
withTitle: Boolean = true,
truncate: Option[Int] = None,
title: Option[String] = None,
params: String = ""): String = {
val klass = userClass(userId, cssClass, withOnline)
val href = userHref(username, params = params)
val content = truncate.fold(username)(username.take)
val titleS = if (withTitle) titleTag(title) else ""
val space = if (withOnline) " " else ""
val dataIcon = if (withOnline) """ data-icon="r"""" else ""
s"""<a$dataIcon $klass $href>$space$titleS$content</a>"""
}
def userLink(
user: User,
cssClass: Option[String] = None,
withOnline: Boolean = true,
withPowerTip: Boolean = true,
withDonor: Boolean = false,
withTitle: Boolean = true,
withBestRating: Boolean = false,
withPerfRating: Option[PerfType] = None,
text: Option[String] = None,
params: String = "") = Html {
val klass = userClass(user.id, cssClass, withOnline, withPowerTip)
val href = userHref(user.username, params)
val content = text | user.username
val titleS = if (withTitle) titleTag(user.title) else ""
val space = if (withOnline) " " else ""
val dataIcon = if (withOnline) """ data-icon="r"""" else ""
val rating = userRating(user, withPerfRating, withBestRating)
val donor = if (withDonor) donorBadge else ""
s"""<a$dataIcon $klass $href>$space$titleS$content$rating$donor</a>"""
}
def userInfosLink(
userId: String,
rating: Option[Int],
cssClass: Option[String] = None,
withPowerTip: Boolean = true,
withTitle: Boolean = false,
withOnline: Boolean = true) = {
val user = lightUser(userId)
val name = user.fold(userId)(_.name)
val klass = userClass(userId, cssClass, withOnline, withPowerTip)
val href = userHref(name)
val content = rating.fold(name)(e => s"$name ($e)")
val titleS = titleTag(user.flatMap(_.title) ifTrue withTitle)
val space = if (withOnline) " " else ""
val dataIcon = if (withOnline) """ data-icon="r"""" else ""
Html(s"""<a$dataIcon $klass $href>$space$titleS$content</a>""")
}
def userSpan(
user: User,
cssClass: Option[String] = None,
withOnline: Boolean = true,
withPowerTip: Boolean = true,
withTitle: Boolean = true,
withBestRating: Boolean = false,
withPerfRating: Option[PerfType] = None,
text: Option[String] = None) = Html {
val klass = userClass(user.id, cssClass, withOnline, withPowerTip)
val href = s"data-${userHref(user.username)}"
val content = text | user.username
val titleS = if (withTitle) titleTag(user.title) else ""
val space = if (withOnline) " " else ""
val dataIcon = if (withOnline) """ data-icon="r"""" else ""
val rating = userRating(user, withPerfRating, withBestRating)
s"""<span$dataIcon $klass $href>$space$titleS$content$rating</span>"""
}
def userIdSpanMini(userId: String, withOnline: Boolean = false) = Html {
val user = lightUser(userId)
val name = user.fold(userId)(_.name)
val content = user.fold(userId)(_.titleNameHtml)
val klass = userClass(userId, none, false)
val href = s"data-${userHref(name)}"
val space = if (withOnline) " " else ""
val dataIcon = if (withOnline) """ data-icon="r"""" else ""
s"""<span$dataIcon $klass $href>$space$content</span>"""
}
private def renderRating(perf: Perf) =
s" (${perf.intRating}${if (perf.provisional) "?" else ""})"
private def userRating(user: User, withPerfRating: Option[PerfType], withBestRating: Boolean) =
withPerfRating match {
case Some(perfType) => renderRating(user.perfs(perfType))
case _ if withBestRating => user.perfs.bestPerf ?? {
case (_, perf) => renderRating(perf)
}
case _ => ""
}
private def userHref(username: String, params: String = "") =
s"""href="${routes.User.show(username)}$params""""
protected def userClass(
userId: String,
cssClass: Option[String],
withOnline: Boolean,
withPowerTip: Boolean = true) = {
"user_link" :: List(
cssClass,
withPowerTip option "ulpt",
withOnline option isOnline(userId).fold("online is-green", "offline")
).flatten
  }.mkString("class=\"", " ", "\"")
def userGameFilterTitle(info: UserInfo, filter: GameFilter)(implicit ctx: UserContext) =
splitNumber(userGameFilterTitleNoTag(info, filter))
def userGameFilterTitleNoTag(info: UserInfo, filter: GameFilter)(implicit ctx: UserContext) = Html((filter match {
case GameFilter.All => info.user.count.game + " " + trans.gamesPlayed()
case GameFilter.Me => ctx.me ?? (me => trans.nbGamesWithYou.str(info.nbWithMe))
case GameFilter.Rated => info.nbRated + " " + trans.rated()
case GameFilter.Win => trans.nbWins(info.user.count.win)
case GameFilter.Loss => trans.nbLosses(info.user.count.loss)
case GameFilter.Draw => trans.nbDraws(info.user.count.draw)
case GameFilter.Playing => info.nbPlaying + " playing"
case GameFilter.Bookmark => trans.nbBookmarks(info.nbBookmark)
case GameFilter.Imported => trans.nbImportedGames(info.nbImported)
    case GameFilter.Search => Html(trans.advancedSearch.str().replaceFirst(" ", "\n"))
}).toString)
def describeUser(user: User) = {
val name = user.titleUsername
val nbGames = user.count.game
val createdAt = org.joda.time.format.DateTimeFormat forStyle "M-" print user.createdAt
val currentRating = user.perfs.bestPerf ?? {
case (pt, perf) => s" Current ${pt.name} rating: ${perf.intRating}."
}
s"$name played $nbGames games since $createdAt.$currentRating"
}
private val donorBadge = """<span data-icon="" class="donor is-gold" title="Lichess donor"></span>"""
}
| JimmyMow/lila | app/templating/UserHelper.scala | Scala | mit | 11,345 |
package ch17_skip_list
import scala.util.Random
class Node(var data: Int, var forwards: Array[Node], var maxLevel: Int)
class SkipList(var head: Node, var skipListLevel: Int) {
def this() = this(new Node(-1, new Array[Node](16), 0), 1)
val MAX_LEVEL = 16
val random = new Random()
def find(value: Int): Option[Node] = {
var p = head
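    // Descend one level at a time, moving right while the next node is still
    // smaller; after the loop, p is the rightmost node < value on level 0.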
for (i <- skipListLevel - 1 to 0 by -1) {
while (p.forwards(i) != null && p.forwards(i).data < value) {
p = p.forwards(i)
}
}
if (p.forwards(0) != null && p.forwards(0).data == value) {
Some(p.forwards(0))
} else {
None
}
}
def insert(value: Int): Unit = {
//init the new node
val level = randomLevel()
val newNode = new Node(value, new Array[Node](level), level)
    // use the updates array to record, at each level, the last node before the insertion point
val updates: Array[Node] = new Array[Node](level)
var p = head
for (i <- level - 1 to 0 by -1) {
while (p.forwards(i) != null && p.forwards(i).data < value) {
p = p.forwards(i)
}
updates(i) = p
}
for (i <- Range(0, level)) {
newNode.forwards(i) = updates(i).forwards(i)
updates(i).forwards(i) = newNode
}
if (level > skipListLevel) {
skipListLevel = level
}
}
def delete(value: Int): Unit = {
var p = head
val updates: Array[Node] = new Array[Node](skipListLevel)
//try to locate the given node with the value
for (i <- skipListLevel - 1 to 0 by -1) {
while (p.forwards(i) != null && p.forwards(i).data < value) {
p = p.forwards(i)
}
updates(i) = p
}
if (p.forwards(0) != null && p.forwards(0).data == value) {
//find the value node, start to delete the node from the skip list
for (i <- skipListLevel - 1 to 0 by -1) {
if (updates(i).forwards(i) != null && updates(i).forwards(i).data == value) {
updates(i).forwards(i) = updates(i).forwards(i).forwards(i)
}
}
}
}
def randomLevel(): Int = {
var level = 1
for (i <- Range(1, MAX_LEVEL)) {
      // nextInt(2) gives a fair coin flip; the original `nextInt() % 2 == 1`
      // only succeeded ~25% of the time, because negative results yield -1.
      if (random.nextInt(2) == 1) {
level += 1
}
}
level
}
def mkString(): String = {
val builder = new StringBuilder
var p = head
while (p.forwards(0) != null) {
p = p.forwards(0)
builder.append(p.data)
}
builder.mkString
}
}
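// A minimal usage sketch (added for illustration; not part of the original file):
object SkipListDemo {
  def main(args: Array[String]): Unit = {
    val list = new SkipList()
    Seq(3, 1, 2).foreach(list.insert)
    println(list.find(2).isDefined) // true
    list.delete(2)
    println(list.find(2).isDefined) // false
    println(list.mkString())        // "13"
  }
}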
| wangzheng0822/algo | scala/src/main/scala/ch17_skip_list/SkipList.scala | Scala | apache-2.0 | 2,398 |
import stainless.lang._
object SuperCall2 {
sealed abstract class Base {
def double(x: BigInt): BigInt = x * 2
}
case class Override() extends Base {
override def double(x: BigInt): BigInt = {
super.double(x + 1) + 42
}
}
case class NoOverride() extends Base
def test1 = {
NoOverride().double(10) == 20
}.holds
def test2 = {
Override().double(10) == 64
}.holds
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/SuperCall2.scala | Scala | apache-2.0 | 415 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalautils
import org.scalatest.FunSpec
import org.scalatest.Matchers
class LegacyTripleEqualsAndShouldMatchersSpec extends FunSpec with Matchers {
describe("ShouldMatcher, which extends LegacyTripleEquals") {
it("should allow users to use should === out of the box") {
1 + 1 should === (2)
}
}
}
| svn2github/scalatest | src/test/scala/org/scalautils/LegacyTripleEqualsAndShouldMatchersSpec.scala | Scala | apache-2.0 | 932 |
package by.pavelverk.hardwrite.core.auth
import by.pavelverk.hardwrite.core.InMemoryStorage
import by.pavelverk.hardwrite.utils.db.DatabaseConnector
import by.pavelverk.hardwrite.core.AuthData
import scala.concurrent.{ExecutionContext, Future}
sealed trait AuthDataStorage {
def findAuthData(login: String): Future[Option[AuthData]]
def saveAuthData(authData: AuthData): Future[AuthData]
}
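// Usage sketch (illustrative, not from the original source): production code
// wires the JDBC-backed storage, while tests can substitute the in-memory one:
//   val storage: AuthDataStorage = new InMemoryAuthDataStorage
//   storage.saveAuthData(data).flatMap(_ => storage.findAuthData(data.username))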
class JdbcAuthDataStorage(
val databaseConnector: DatabaseConnector
)(implicit executionContext: ExecutionContext) extends AuthDataTable with AuthDataStorage {
import databaseConnector._
import databaseConnector.profile.api._
override def findAuthData(login: String): Future[Option[AuthData]] =
db.run(auth.filter(d => d.username === login || d.email === login).result.headOption)
override def saveAuthData(authData: AuthData): Future[AuthData] =
db.run(auth.insertOrUpdate(authData)).map(_ => authData)
}
class InMemoryAuthDataStorage extends InMemoryStorage[String, AuthData] with AuthDataStorage {
override def findAuthData(login: String): Future[Option[AuthData]] = find(d => d.username == login || d.email == login)
override def saveAuthData(authData: AuthData): Future[AuthData] = save(authData)
} | VerkhovtsovPavel/BSUIR_Labs | Master/back/akka-http-rest-master/src/main/scala/by/pavelverk/hardwrite/core/auth/AuthDataStorage.scala | Scala | mit | 1,232 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.effect.features
import io.truthencode.ddo.api.model.effect.DetailedEffect
import io.truthencode.ddo.enhancement.BonusType
import io.truthencode.ddo.model.effect._
import io.truthencode.ddo.model.stats.BasicStat
import io.truthencode.ddo.support.naming.UsingSearchPrefix
/**
 * Affects your to-hit chance by a percentage
*/
trait HitChancePercentFeature extends Features {
self: SourceInfo =>
protected val hitChanceBonusType: BonusType
protected val hitChanceBonusAmount: Int
protected[this] val triggerOn: Seq[TriggerEvent]
protected[this] val triggerOff: Seq[TriggerEvent]
protected[this] val hitChanceCategories: Seq[EffectCategories.Value]
private val src = this
private[this] val hitChanceChance =
new PartModifier[Int, BasicStat] with UsingSearchPrefix {
/**
* Used when qualifying a search with a prefix. Examples include finding "HalfElf" from
* qualified "Race:HalfElf"
*
* @return
* A default or applied prefix
*/
override def searchPrefixSource: String = partToModify.searchPrefixSource
override protected[this] lazy val partToModify: BasicStat =
BasicStat.ToHitChance
/**
* The General Description should be just that. This should not include specific values unless
* all instances will share that value. I.e. a Dodge Effect might state it increases your
* miss-chance, but omit any value such as 20%. Those values will be displayed in the
* effectText of a specific implementation such as the Dodge Feat or Uncanny Dodge
*/
override val generalDescription: String =
"Increases your chance to hit by a certain percentage"
/**
* a list of Categories useful for menu / UI placement and also for searching / querying for
* Miss-Chance or other desired effects.
*
* This list might be constrained or filtered by an Enumeration or CSV file. The goal is to
* enable quick and advanced searching for specific categories from general (Miss-Chance) to
* specific (evasion). In addition, it may be useful for deep searching such as increasing
* Spot, which should suggest not only +Spot items, but +Wisdom or eventually include a feat
* or enhancement that allows the use of some other value as your spot score.
*/
override def categories: Seq[String] = hitChanceCategories.map(_.toString)
private val eb = EffectParameterBuilder()
.toggleOffValue(triggerOff: _*)
.toggleOnValue(triggerOn: _*)
.addBonusType(hitChanceBonusType)
.build
override protected[this] def effectParameters: Seq[ParameterModifier[_]] = eb.modifiers
    override lazy val effectDetail: DetailedEffect = DetailedEffect(
      id = "HitChance",
      description = "Increases your chance to hit by a percentage",
triggersOn = triggerOn.map(_.entryName),
triggersOff = triggerOff.map(_.entryName),
bonusType = hitChanceBonusType.entryName
)
override val source: SourceInfo = src
override lazy val value: Int = hitChanceBonusAmount
    override lazy val effectText: Option[String] = Some(s"Hit chance increased by $value%")
}
abstract override def features: Seq[Feature[_]] = {
assert(hitChanceChance.value == hitChanceBonusAmount)
super.features :+ hitChanceChance
}
}
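// Mix-in sketch (illustrative assumption -- all names below are invented, not
// from the original source): a concrete feature supplies the abstract members:
//   trait Precision extends HitChancePercentFeature with SourceInfo {
//     override protected val hitChanceBonusType = BonusType.Competence
//     override protected val hitChanceBonusAmount = 5
//     override protected[this] val triggerOn = Seq(TriggerEvent.Passive)
//     override protected[this] val triggerOff = Seq(TriggerEvent.Never)
//     override protected[this] val hitChanceCategories = Seq(EffectCategories.MissChance)
//   }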
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/effect/features/HitChancePercentFeature.scala | Scala | apache-2.0 | 4,040 |
import sbt._
class ExcludeScala(info: ProjectInfo) extends DefaultProject(info)
{
lazy val noScala = task { checkNoScala }
def checkNoScala =
{
val existing = compileClasspath.filter(isScalaLibrary _).get
if(existing.isEmpty) None else Some("Scala library was incorrectly retrieved: " + existing)
}
def isScalaLibrary(p: Path) = p.name contains "scala-library"
val sbinary = "org.scala-tools.sbinary" % "sbinary_2.7.7" % "0.3"
} | kuochaoyi/xsbt | sbt/src/sbt-test/dependency-management/exclude-scala/project/build/ExcludeScala.scala | Scala | bsd-3-clause | 441 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.test
import java.util.concurrent.TimeUnit
import org.fluentlenium.adapter.FluentAdapter
import org.fluentlenium.core.domain.FluentList
import org.fluentlenium.core.domain.FluentWebElement
import org.openqa.selenium._
import org.openqa.selenium.firefox._
import org.openqa.selenium.htmlunit._
import org.openqa.selenium.support.ui.FluentWait
import scala.compat.java8.FunctionConverters._
/**
* A test browser (Using Selenium WebDriver) with the FluentLenium API (https://github.com/Fluentlenium/FluentLenium).
*
* @param webDriver The WebDriver instance to use.
*/
case class TestBrowser(webDriver: WebDriver, baseUrl: Option[String]) extends FluentAdapter() {
super.initFluent(webDriver)
baseUrl.foreach(baseUrl => super.getConfiguration.setBaseUrl(baseUrl))
/**
* Submits a form with the given field values
*
* @example {{{
* submit("#login", fields =
* "email" -> email,
* "password" -> password
* )
* }}}
*/
def submit(selector: String, fields: (String, String)*): FluentList[FluentWebElement] = {
fields.foreach {
case (fieldName, fieldValue) =>
$(s"$selector *[name=$fieldName]").fill.`with`(fieldValue)
}
$(selector).submit()
}
/**
* Repeatedly applies this instance's input value to the given block until one of the following occurs:
* the function returns neither null nor false,
* the function throws an unignored exception,
* the timeout expires
*
* @param timeout the timeout amount
* @param timeUnit timeout unit
* @param block code to be executed
*/
def waitUntil[T](timeout: Int, timeUnit: TimeUnit)(block: => T): T = {
val wait = new FluentWait[WebDriver](webDriver).withTimeout(java.time.Duration.ofMillis(timeUnit.toMillis(timeout)))
val f = (driver: WebDriver) => block
wait.until(f.asJava)
}
/**
* Repeatedly applies this instance's input value to the given block until one of the following occurs:
* the function returns neither null nor false,
* the function throws an unignored exception,
* the timeout expires
*
* @param timeout duration of how long should wait
* @param block code to be executed
*/
def waitUntil[T](timeout: java.time.Duration)(block: => T): T = {
val wait = new FluentWait[WebDriver](webDriver).withTimeout(timeout)
val f = (driver: WebDriver) => block
wait.until(f.asJava)
}
/**
* Repeatedly applies this instance's input value to the given block until one of the following occurs:
* the function returns neither null nor false,
* the function throws an unignored exception,
* the default timeout expires
*
* @param block code to be executed
*/
def waitUntil[T](block: => T): T = waitUntil(3000, TimeUnit.MILLISECONDS)(block)
/**
* retrieves the underlying option interface that can be used
* to set cookies, manage timeouts among other things
*/
def manage: WebDriver.Options = super.getDriver.manage
def quit(): Unit = {
Option(super.getDriver).foreach(_.quit())
releaseFluent()
}
}
/**
* Helper utilities to build TestBrowsers
*/
object TestBrowser {
/**
* Creates an in-memory WebBrowser (using HtmlUnit)
*
* @param baseUrl The default base URL that will be used for relative URLs
*/
def default(baseUrl: Option[String] = None) = of(classOf[HtmlUnitDriver], baseUrl)
/**
* Creates a firefox WebBrowser.
*
* @param baseUrl The default base URL that will be used for relative URLs
*/
def firefox(baseUrl: Option[String] = None) = of(classOf[FirefoxDriver], baseUrl)
/**
* Creates a WebBrowser of the specified class name.
*
* @param baseUrl The default base URL that will be used for relative URLs
*/
def of[WEBDRIVER <: WebDriver](webDriver: Class[WEBDRIVER], baseUrl: Option[String] = None) =
TestBrowser(WebDriverFactory(webDriver), baseUrl)
}
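// Usage sketch (assumption, not from the original source): drive a page with
// the in-memory browser and dispose of it afterwards.
//   val browser = TestBrowser.default(baseUrl = Some("http://localhost:9000"))
//   browser.goTo("/login")
//   browser.submit("#login", "email" -> "a@b.c", "password" -> "secret")
//   browser.quit()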
object WebDriverFactory {
/**
* Creates a Selenium Web Driver and configures it
* @param clazz Type of driver to create
* @return The driver instance
*/
def apply[D <: WebDriver](clazz: Class[D]): WebDriver = {
val driver = clazz.getDeclaredConstructor().newInstance()
// Driver-specific configuration
driver match {
case htmlunit: HtmlUnitDriver => htmlunit.setJavascriptEnabled(true)
case _ =>
}
driver
}
}
| wegtam/playframework | testkit/play-test/src/main/scala/play/api/test/Selenium.scala | Scala | apache-2.0 | 4,452 |
package cromwell.engine.workflow.mocks
import org.specs2.mock.Mockito
import wdl.{Declaration, WdlExpression}
import wom.types.WomType
object DeclarationMock {
type DeclarationMockType = (String, WomType, WdlExpression)
}
trait DeclarationMock extends Mockito {
def mockDeclaration(name: String,
womType: WomType,
expression: WdlExpression) = {
val declaration = mock[Declaration]
declaration.unqualifiedName returns name
declaration.expression returns Option(expression)
declaration.womType returns womType
declaration
}
}
| ohsu-comp-bio/cromwell | engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala | Scala | bsd-3-clause | 597 |
/* Title: Tools/jEdit/src/bibtex_jedit.scala
Author: Makarius
BibTeX support in Isabelle/jEdit.
*/
package isabelle.jedit
import isabelle._
import scala.collection.mutable
import java.awt.event.{ActionListener, ActionEvent}
import javax.swing.text.Segment
import javax.swing.tree.DefaultMutableTreeNode
import javax.swing.{JMenu, JMenuItem}
import org.gjt.sp.jedit.Buffer
import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea}
import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler, ParserRuleSet}
import sidekick.{SideKickParser, SideKickParsedData}
object Bibtex_JEdit
{
/** buffer model **/
/* file type */
def check(buffer: Buffer): Boolean =
JEdit_Lib.buffer_name(buffer).endsWith(".bib")
/* parse entries */
def parse_buffer_entries(buffer: Buffer): List[(String, Text.Offset)] =
{
val chunks =
try { Bibtex.parse(JEdit_Lib.buffer_text(buffer)) }
catch { case ERROR(msg) => Output.warning(msg); Nil }
val result = new mutable.ListBuffer[(String, Text.Offset)]
var offset = 0
for (chunk <- chunks) {
if (chunk.name != "" && !chunk.is_command) result += ((chunk.name, offset))
offset += chunk.source.length
}
result.toList
}
/* retrieve entries */
def entries_iterator(): Iterator[(String, Buffer, Text.Offset)] =
for {
buffer <- JEdit_Lib.jedit_buffers()
model <- PIDE.document_model(buffer).iterator
(name, offset) <- model.bibtex_entries.iterator
} yield (name, buffer, offset)
/* completion */
def complete(name: String): List[String] =
{
val name1 = name.toLowerCase
(for ((entry, _, _) <- entries_iterator() if entry.toLowerCase.containsSlice(name1))
yield entry).toList
}
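  // Example (illustrative): with entries "Smith2010" and "Jones2011" loaded,
  // complete("smi") yields List("Smith2010") -- matching is case-insensitive
  // and substring-based.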
def completion(
history: Completion.History,
text_area: JEditTextArea,
rendering: Rendering): Option[Completion.Result] =
{
for {
Text.Info(r, name) <- rendering.citation(JEdit_Lib.before_caret_range(text_area, rendering))
original <- JEdit_Lib.try_get_text(text_area.getBuffer, r)
orig = Library.perhaps_unquote(original)
entries = complete(name).filter(_ != orig)
if !entries.isEmpty
items =
entries.map({
case entry =>
val full_name = Long_Name.qualify(Markup.CITATION, entry)
val description = List(entry, "(BibTeX entry)")
val replacement = quote(entry)
Completion.Item(r, original, full_name, description, replacement, 0, false)
}).sorted(history.ordering).take(PIDE.options.int("completion_limit"))
} yield Completion.Result(r, original, false, items)
}
/** context menu **/
def context_menu(text_area0: JEditTextArea): List[JMenuItem] =
{
text_area0 match {
case text_area: TextArea =>
text_area.getBuffer match {
case buffer: Buffer
if (check(buffer) && buffer.isEditable) =>
val menu = new JMenu("BibTeX entries")
for (entry <- Bibtex.entries) {
val item = new JMenuItem(entry.kind)
item.addActionListener(new ActionListener {
def actionPerformed(evt: ActionEvent): Unit =
Isabelle.insert_line_padding(text_area, entry.template)
})
menu.add(item)
}
List(menu)
case _ => Nil
}
case _ => Nil
}
}
/** token markup **/
/* token style */
private def token_style(context: String, token: Bibtex.Token): Byte =
token.kind match {
case Bibtex.Token.Kind.COMMAND => JEditToken.KEYWORD2
case Bibtex.Token.Kind.ENTRY => JEditToken.KEYWORD1
case Bibtex.Token.Kind.KEYWORD => JEditToken.OPERATOR
case Bibtex.Token.Kind.NAT => JEditToken.LITERAL2
case Bibtex.Token.Kind.STRING => JEditToken.LITERAL1
case Bibtex.Token.Kind.NAME => JEditToken.LABEL
case Bibtex.Token.Kind.IDENT =>
if (Bibtex.is_month(token.source)) JEditToken.LITERAL3
else
Bibtex.get_entry(context) match {
case Some(entry) if entry.is_required(token.source) => JEditToken.KEYWORD3
case Some(entry) if entry.is_optional(token.source) => JEditToken.KEYWORD4
case _ => JEditToken.DIGIT
}
case Bibtex.Token.Kind.SPACE => JEditToken.NULL
case Bibtex.Token.Kind.COMMENT => JEditToken.COMMENT1
case Bibtex.Token.Kind.ERROR => JEditToken.INVALID
}
/* line context */
private val context_rules = new ParserRuleSet("bibtex", "MAIN")
private class Line_Context(val context: Option[Bibtex.Line_Context])
extends TokenMarker.LineContext(context_rules, null)
{
override def hashCode: Int = context.hashCode
override def equals(that: Any): Boolean =
that match {
case other: Line_Context => context == other.context
case _ => false
}
}
/* token marker */
class Token_Marker extends TokenMarker
{
override def markTokens(context: TokenMarker.LineContext,
handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext =
{
val line_ctxt =
context match {
case c: Line_Context => c.context
case _ => Some(Bibtex.Ignored)
}
val line = if (raw_line == null) new Segment else raw_line
def no_markup =
{
val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
(List(styled_token), new Line_Context(None))
}
val context1 =
{
val (styled_tokens, context1) =
line_ctxt match {
case Some(ctxt) =>
try {
val (chunks, ctxt1) = Bibtex.parse_line(line, ctxt)
val styled_tokens =
for { chunk <- chunks; tok <- chunk.tokens }
yield (token_style(chunk.kind, tok), tok.source)
(styled_tokens, new Line_Context(Some(ctxt1)))
}
catch { case ERROR(msg) => Output.warning(msg); no_markup }
case None => no_markup
}
var offset = 0
for ((style, token) <- styled_tokens) {
val length = token.length
val end_offset = offset + length
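        // Emit tokens that contain a tab one character at a time (jEdit appears
        // to need tabs isolated into single-char tokens to expand them correctly).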
        if ((offset until end_offset).exists(i => line.charAt(i) == '\t')) {
for (i <- offset until end_offset)
handler.handleToken(line, style, i, 1, context1)
}
else handler.handleToken(line, style, offset, length, context1)
offset += length
}
handler.handleToken(line, JEditToken.END, line.count, 0, context1)
context1
}
val context2 = context1.intern
handler.setLineContext(context2)
context2
}
}
/** Sidekick parser **/
class Sidekick_Parser extends SideKickParser("bibtex")
{
override def supportsCompletion = false
private class Asset(label: String, label_html: String, range: Text.Range, source: String)
extends Isabelle_Sidekick.Asset(label, range) {
override def getShortString: String = label_html
override def getLongString: String = source
}
def parse(buffer: Buffer, error_source: errorlist.DefaultErrorSource): SideKickParsedData =
{
val data = Isabelle_Sidekick.root_data(buffer)
try {
var offset = 0
for (chunk <- Bibtex.parse(JEdit_Lib.buffer_text(buffer))) {
val kind = chunk.kind
val name = chunk.name
val source = chunk.source
if (kind != "") {
val label = kind + (if (name == "") "" else " " + name)
val label_html =
"<html><b>" + HTML.encode(kind) + "</b>" +
(if (name == "") "" else " " + HTML.encode(name)) + "</html>"
val range = Text.Range(offset, offset + source.size)
val asset = new Asset(label, label_html, range, source)
data.root.add(new DefaultMutableTreeNode(asset))
}
offset += source.size
}
data
}
catch { case ERROR(msg) => Output.warning(msg); null }
}
}
}
| MerelyAPseudonym/isabelle | src/Tools/jEdit/src/bibtex_jedit.scala | Scala | bsd-3-clause | 8,118 |
// Reverse each row, then invert every bit (0 -> 1, 1 -> 0) in a single pass.
object Solution {
def flipAndInvertImage(xs: Array[Array[Int]]): Array[Array[Int]] =
xs.map(_.reverse.map(x => if (x == 0) 1 else 0))
  def main(args: Array[String]): Unit = {
    // Array(...) is the factory method; `new Array` expects a length, so the
    // original calls did not compile. Print each row for a quick sanity check.
    flipAndInvertImage(Array(Array(1, 1, 0), Array(1, 0, 1), Array(0, 0, 0)))
      .foreach(row => println(row.mkString(",")))
  }
}
| Javran/leetcode | flipping-an-image/Solution.scala | Scala | mit | 315 |
package vonsim.assembly.parser
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.{NoPosition, Position, Reader}
import vonsim.assembly.lexer._
import scala.Left
import scala.Right
import vonsim.assembly.ParserError
import vonsim.assembly.Location
import scala.util.parsing.input.Positional
import vonsim.assembly.i18n.English
import vonsim.assembly.i18n.CompilerLanguage
import scala.util.Random
object Parser extends MyParsers {
var compilerLanguage:CompilerLanguage=new English()
override type Elem = Token
def apply(tokens: Seq[Token]): Either[ParserError, Instruction] = {
val reader = new TokenReader(tokens)
val defaultMessage=compilerLanguage.parserError // temporary until the parser is improved
program(reader) match {
case NoSuccess(msg, next) => Left(ParserError(Location(next.pos.line, next.pos.column), defaultMessage))
case Success(result, next) => Right(result)
}
}
class TokenReader(tokens: Seq[Token]) extends Reader[Token] {
override def first: Token = tokens.head
override def atEnd: Boolean = tokens.isEmpty
override def pos: Position = tokens.headOption.map(_.pos).getOrElse(NoPosition)
override def rest: Reader[Token] = new TokenReader(tokens.tail)
}
def program = positioned {
(labeledInstruction | instruction) ~ newline ^^{case (o:Instruction) ~ _ => o }
}
  def labeledInstruction = positioned {
(label ~ instruction) ^^{ case LABEL(l) ~ (o:ExecutableInstruction) => LabeledInstruction(l,o) }
}
def instruction = positioned{
zeroary | org | mov | jump | arithmetic | io | intn | stack | vardef | equ
}
def equ = positioned{
(identifier ~ EQU() ~ expression) ^^{case ((i:IDENTIFIER) ~ (o:EQU) ~ (e:Expression) ) => EQUDef(i.str,e) }
}
def arithmetic= positioned {
binaryArithmetic | unaryArithmetic
}
def binaryArithmetic: Parser[Instruction] = positioned {
(binary ~ operand ~ COMMA() ~ operand) ^^ { case ( (o:BinaryArithmeticOp) ~ (m:Operand) ~ _ ~ (v:Operand)) => BinaryArithmetic(o,m,v)}
}
def binary= (Token.binaryArithmetic map tokenAsParser) reduceLeft(_ | _)
def unaryArithmetic: Parser[Instruction] = positioned {
(unary ~ operand) ^^ { case ( (o:UnaryArithmeticOp) ~ (m:Operand)) => UnaryArithmetic(o,m)}
}
def unary = (Token.unaryArithmetic map tokenAsParser) reduceLeft(_ | _)
def io = positioned {
((IN() | OUT()) ~ (AL() | AX()) ~ COMMA() ~ (ioaddress)) ^^ { case ( (o:IOToken) ~ (m:IORegister) ~ _ ~ (a:IOAddress)) => IO(o,m,a)}
}
def cmp = positioned {
(CMP() ~ (operand) ~ COMMA() ~ (operand) ~ (newline)) ^^ { case ( CMP() ~ (v1:Operand) ~ _ ~ (v2:Operand) ~ _) => Cmp(v1,v2)}
}
def intn= positioned {
(INT() ~ literalInteger ) ^^ {case o ~ LITERALINTEGER(v) => IntN(v)}
}
def org= positioned {
(ORG() ~ literalInteger ) ^^ {case o ~ LITERALINTEGER(v) => Org(v)}
}
def stack= positioned {
((PUSH() | POP()) ~ fullRegister ) ^^ {case (o:StackInstruction) ~ (t:FullRegisterToken) => Stack(o,t)}
}
def mov= positioned {
(MOV() ~ operand ~ COMMA() ~ operand) ^^ {
case ( MOV() ~ (m:Operand) ~ _ ~ (v:Operand)) => Mov(m,v)
}
}
def zeroary= positioned {
val end =END() ^^ (_ => End())
val ret = RET() ^^ (_ => Ret())
val nop = NOP() ^^ (_ => Nop())
val hlt = HLT() ^^ (_ => Hlt())
val cli = CLI() ^^ (_ => Cli())
val sti = STI() ^^ (_ => Sti())
val iret = IRET() ^^ (_ => IRet())
val pushf = PUSHF() ^^ (_ => Pushf())
val popf = POPF() ^^ (_ => Popf())
end | ret | nop | hlt | cli | sti | iret | pushf | popf
}
def jump = jmp | conditionalJump | call
def jmp = positioned {
(JMP() ~ identifier ) ^^ {case JMP() ~ IDENTIFIER(i) => UnconditionalJump(i)}
}
def call = positioned {
(CALL() ~ identifier ) ^^ {case CALL() ~ IDENTIFIER(i) => Call(i)}
}
def vardef = positioned {
val r = new scala.util.Random(31)
val ints= ((identifier?) ~ (DB() | DW() ) ~ varDefInts ) ^^ {
case Some(IDENTIFIER(id))~ (t:VarType) ~ (e:List[Either[Undefined.type,Expression]]) => VarDef(id,t,e)
case None~ (t:VarType) ~ (e:List[Either[Undefined.type,Expression]]) => VarDef(Random.alphanumeric.take(30).mkString,t,e)
}
val str = ((identifier ?) ~ DB() ~ literalString ) ^^ {
case Some(IDENTIFIER(id))~ DB() ~ LITERALSTRING(s) => VarDef(id,DB(),stringToIntList(s))
case None ~ DB() ~ LITERALSTRING(s) => VarDef(Random.alphanumeric.take(30).mkString,DB(),stringToIntList(s))
}
str | ints
}
def stringToIntList(s:String)=s.map( (c:Char) => Right(ConstantExpression(c.charValue().toInt))).toList
def varDefValue = (expression ^^ {case e => Right(e)} | UNINITIALIZED() ^^ {case u => Left(Undefined)})
def varDefInts = rep1sep(varDefValue, COMMA())
def conditionalJump = positioned {
(conditionalJumpTokens ~ identifier ) ^^ {case (o:ConditionalJumpToken) ~ IDENTIFIER(i) => ConditionalJump(o,i)}
}
private def newline= positioned {
accept(compilerLanguage.newline, { case bl @ NEWLINE() => bl })
}
private def literalString = positioned {
accept(compilerLanguage.stringLiteral, { case lit @ LITERALSTRING(v) => lit })
}
private def identifier = positioned {
accept(compilerLanguage.identifier, { case lit @ IDENTIFIER(v) => lit })
}
private def label = positioned {
accept(compilerLanguage.label, { case lit @ LABEL(v) => lit })
}
private def literalInteger = positioned {
accept(compilerLanguage.integerLiteral, { case lit @ LITERALINTEGER(v) => lit })
}
def conditionalJumpTokens = positioned{
(Token.conditionalJump map tokenAsParser) reduceLeft(_ | _)
}
def tokenAsParser(t:Token)= t ^^^ t
private def indirect={
val indirectDWord = (WORD() ~ PTR() ~ INDIRECTBX()) ^^ { case (WORD() ~ PTR() ~ INDIRECTBX()) => DWORDINDIRECTBX()}
val indirectWord = (BYTE() ~ PTR() ~ INDIRECTBX()) ^^ { case (BYTE() ~ PTR() ~ INDIRECTBX()) => WORDINDIRECTBX()}
indirectDWord | indirectWord |INDIRECTBX()
}
def operand = expression | allRegisters | indirect
private def allRegisters=(Token.registers map tokenAsParser) reduceLeft(_ | _)
private def fullRegister = positioned{
(Token.xRegisters map tokenAsParser) reduceLeft(_ | _)
}
private def ioaddress = expression
def copyPosition[T <: Positional,E <: Positional](to:T,from:E)={
to.pos=from.pos
to
}
def offsetLabel = accept(compilerLanguage.offsetLabel, { case lit @ OFFSETLABEL(v) => copyPosition(OffsetLabelExpression(v),lit)})
def negativeInteger = MinusOp() ~ integer ^^ {case m ~ i => copyPosition(ConstantExpression(-i.v),i)}
def integer = accept(compilerLanguage.integerLiteral, { case lit @ LITERALINTEGER(v) => copyPosition(ConstantExpression(v),lit) })
def expressionLabel = accept(compilerLanguage.equLabel, { case lit @ IDENTIFIER(v) => copyPosition(LabelExpression(v),lit)})
def parens:Parser[Expression] = OpenParen() ~> expression <~ CloseParen()
def term = ( negativeInteger | integer | offsetLabel | expressionLabel | parens )
def binaryOp(level:Int):Parser[((Expression,Expression)=>Expression)] = {
level match {
case 1 =>
PlusOp() ^^^ { (a:Expression, b:Expression) => BinaryExpression(PlusOp() ,a,b) } |
MinusOp() ^^^ { (a:Expression, b:Expression) => BinaryExpression(MinusOp(),a,b) }
case 2 =>
MultOp() ^^^ { (a:Expression, b:Expression) => BinaryExpression(MultOp(),a,b) } |
DivOp() ^^^ { (a:Expression, b:Expression) => BinaryExpression(DivOp(),a,b) }
case _ => throw new RuntimeException("bad precedence level "+level)
}
}
val minPrec = 1
val maxPrec = 2
def binary(level:Int):Parser[Expression] =
if (level>maxPrec) term
else binary(level+1) * binaryOp(level)
def expression = ( binary(minPrec) | term )
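  // Worked example (illustrative): because binary(1) (+, -) is defined in terms
  // of binary(2) (*, /), multiplication binds tighter, so a token stream for
  // "2+3*4" parses as
  //   BinaryExpression(PlusOp(), ConstantExpression(2),
  //     BinaryExpression(MultOp(), ConstantExpression(3), ConstantExpression(4)))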
} | facundoq/vonsim | src/main/scala/vonsim/assembly/parser/Parser.scala | Scala | agpl-3.0 | 8,020 |
package com.tobe.client
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.io.{BytesWritable, NullWritable}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.tensorflow.example.{Example, Feature, Features, FloatList}
object SparkDensePredictClient {
def main(args: Array[String]): Unit = {
System.out.println("Start spark project")
var host: String = "127.0.0.1"
//var host = "10.235.114.223"
var port: Int = 9000
var modelName: String = "dense"
var modelVersion: Long = 1
var inputPath = "/user/u_chendihao/deep_recommend_system/data/cancer_train.csv"
var outputPath = "/user/u_chendihao/deep_recommend_system/predict"
    // Parse command-line arguments
    if (args.length == 6) {
      host = args(0)
      port = args(1).toInt
      modelName = args(2)
      modelVersion = args(3).toLong
      inputPath = args(4)
      outputPath = args(5)
    }
val sparkConf = new SparkConf().setAppName("Generate TFRecord")
val sc = new SparkContext(sparkConf)
val fs = FileSystem.get(sc.hadoopConfiguration)
sc.textFile(inputPath).map(line => {
/*
var arr = line.split(",", 2)
var label = arr(0).toFloat
val client = new DensePredictClient(host, port)
val result = client.predict_example(modelName, modelVersion)
*/
// TODO: Generate TensorProto and request the server
System.out.println(line)
})
// TODO: Change to request for each executor
val client: DensePredictClient = new DensePredictClient(host, port)
client.predict_example(modelName, modelVersion)
System.out.println("End of predict client")
}
/*
def csvToTFRecords(rdd: RDD[String]) = {
rdd.map(line => {
val arr = line.split(",", 2)
val label = FloatList.newBuilder().addValue(arr(0).toFloat).build()
val valuesList = FloatList.newBuilder()
arr(1).split(",").map(value => {
val values = value.toFloat
valuesList.addValue(values)
})
val features = Features.newBuilder().putFeature("label", Feature.newBuilder().setFloatList(label).build())
.putFeature("features", Feature.newBuilder().setFloatList(valuesList.build()).build())
.build()
val example = Example.newBuilder()
.setFeatures(features)
.build()
(new BytesWritable(example.toByteArray), NullWritable.get())
})
}
*/
} | tobegit3hub/deep_recommend_system | java_predict_client/src/main/scala/com/tobe/client/SparkDensePredictClient.scala | Scala | apache-2.0 | 2,480 |
package org.jetbrains.plugins.scala
package lang.refactoring.rename
import com.intellij.refactoring.rename.RenameHandler
import com.intellij.openapi.project.Project
import com.intellij.openapi.editor.Editor
import com.intellij.psi.{PsiFile, PsiElement}
import com.intellij.openapi.actionSystem.{CommonDataKeys, DataContext}
import com.intellij.codeInsight.daemon.impl.quickfix.EmptyExpression
import com.intellij.codeInsight.template._
import com.intellij.openapi.command.CommandProcessor
import com.intellij.refactoring.RefactoringBundle
import com.intellij.util.PairProcessor
import lang.psi.api.expr.xml.ScXmlPairedTag
import com.intellij.openapi.editor.colors.{EditorColors, EditorColorsManager}
import com.intellij.codeInsight.highlighting.HighlightManager
import com.intellij.openapi.editor.markup.RangeHighlighter
import java.util.ArrayList
/**
* User: Dmitry Naydanov
* Date: 4/8/12
*/
class XmlRenameHandler extends RenameHandler {
def isAvailableOnDataContext(dataContext: DataContext): Boolean = {
val editor = CommonDataKeys.EDITOR.getData(dataContext)
if (editor == null || !editor.getSettings.isVariableInplaceRenameEnabled) return false
val file = CommonDataKeys.PSI_FILE.getData(dataContext)
if (file == null) return false
val element = file.findElementAt(editor.getCaretModel.getOffset)
if (element == null) return false
element.getParent match {
case _: ScXmlPairedTag => true
case _ => false
}
}
def isRenaming(dataContext: DataContext): Boolean = isAvailableOnDataContext(dataContext)
def invoke(project: Project, editor: Editor, file: PsiFile, dataContext: DataContext) {
if (!isRenaming(dataContext)) return
val element = file.findElementAt(editor.getCaretModel.getOffset)
if (element != null) invoke(project, Array(element), dataContext)
}
def invoke(project: Project, elements: Array[PsiElement], dataContext: DataContext) {
import scala.collection.JavaConversions._
if (!isRenaming(dataContext) || elements == null || elements.length != 1) return
val element = if (elements(0) == null || !elements(0).getParent.isInstanceOf[ScXmlPairedTag]) return else
elements(0).getParent.asInstanceOf[ScXmlPairedTag]
if (element.getMatchedTag == null || element.getTagNameElement == null || element.getMatchedTag.getTagNameElement == null) return
val editor = CommonDataKeys.EDITOR.getData(dataContext)
val elementStartName = element.getTagName
val rangeHighlighters = new ArrayList[RangeHighlighter]()
val matchedRange = element.getMatchedTag.getTagNameElement.getTextRange
def highlightMatched() {
val colorsManager = EditorColorsManager.getInstance()
val attributes = colorsManager.getGlobalScheme.getAttributes(EditorColors.WRITE_SEARCH_RESULT_ATTRIBUTES)
HighlightManager.getInstance(editor.getProject).addOccurrenceHighlight(editor, matchedRange.getStartOffset,
matchedRange.getEndOffset, attributes, 0, rangeHighlighters, null)
rangeHighlighters.foreach {a =>
a.setGreedyToLeft(true)
a.setGreedyToRight(true)
}
}
def rename() {
CommandProcessor.getInstance().executeCommand(project, new Runnable {
def run() {
extensions.inWriteAction {
val offset = editor.getCaretModel.getOffset
val template = buildTemplate()
editor.getCaretModel.moveToOffset(element.getParent.getTextOffset)
TemplateManager.getInstance(project).startTemplate(editor, template, new TemplateEditingAdapter {
override def templateFinished(template: Template, brokenOff: Boolean) {
templateCancelled(template)
}
override def templateCancelled(template: Template) {
val highlightManager = HighlightManager.getInstance(project)
rangeHighlighters.foreach{a => highlightManager.removeSegmentHighlighter(editor, a)}
}
},
new PairProcessor[String, String] {
def process(s: String, t: String): Boolean = !(t.length == 0 || t.charAt(t.length - 1) == ' ')
})
highlightMatched()
editor.getCaretModel.moveToOffset(offset)
}
}
}, RefactoringBundle.message("rename.title"), null)
}
def buildTemplate(): Template = {
val builder = new TemplateBuilderImpl(element.getParent)
builder.replaceElement(element.getTagNameElement, "first", new EmptyExpression {
override def calculateQuickResult(context: ExpressionContext): Result = new TextResult(Option(element.getTagName).getOrElse(elementStartName))
override def calculateResult(context: ExpressionContext): Result = calculateQuickResult(context)
}, true)
builder.replaceElement(element.getMatchedTag.getTagNameElement, "second", "first", false)
builder.buildInlineTemplate()
}
rename()
}
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/refactoring/rename/XmlRenameHandler.scala | Scala | apache-2.0 | 4,959 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.RowOrdering
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.types._
/**
* Functions to help with checking for valid data types and value comparison of various types.
*/
object TypeUtils {
def checkForNumericExpr(dt: DataType, caller: String): TypeCheckResult = {
if (dt.isInstanceOf[NumericType] || dt == NullType) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(s"$caller requires numeric types, not ${dt.catalogString}")
}
}
def checkForOrderingExpr(dt: DataType, caller: String): TypeCheckResult = {
if (RowOrdering.isOrderable(dt)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(
s"$caller does not support ordering on type ${dt.catalogString}")
}
}
def checkForSameTypeInputExpr(types: Seq[DataType], caller: String): TypeCheckResult = {
if (TypeCoercion.haveSameType(types)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(
s"input to $caller should all be the same type, but it's " +
types.map(_.catalogString).mkString("[", ", ", "]"))
}
}
def checkForMapKeyType(keyType: DataType): TypeCheckResult = {
if (keyType.existsRecursively(_.isInstanceOf[MapType])) {
TypeCheckResult.TypeCheckFailure("The key of map cannot be/contain map.")
} else {
TypeCheckResult.TypeCheckSuccess
}
}
def checkForAnsiIntervalOrNumericType(
dt: DataType, funcName: String): TypeCheckResult = dt match {
case _: YearMonthIntervalType | _: DayTimeIntervalType | NullType =>
TypeCheckResult.TypeCheckSuccess
case dt if dt.isInstanceOf[NumericType] => TypeCheckResult.TypeCheckSuccess
case other => TypeCheckResult.TypeCheckFailure(
s"function $funcName requires numeric or interval types, not ${other.catalogString}")
}
def getNumeric(t: DataType, exactNumericRequired: Boolean = false): Numeric[Any] = {
if (exactNumericRequired) {
t.asInstanceOf[NumericType].exactNumeric.asInstanceOf[Numeric[Any]]
} else {
t.asInstanceOf[NumericType].numeric.asInstanceOf[Numeric[Any]]
}
}
def getInterpretedOrdering(t: DataType): Ordering[Any] = {
t match {
case i: AtomicType => i.ordering.asInstanceOf[Ordering[Any]]
case a: ArrayType => a.interpretedOrdering.asInstanceOf[Ordering[Any]]
case s: StructType => s.interpretedOrdering.asInstanceOf[Ordering[Any]]
case udt: UserDefinedType[_] => getInterpretedOrdering(udt.sqlType)
}
}
def compareBinary(x: Array[Byte], y: Array[Byte]): Int = {
val limit = if (x.length <= y.length) x.length else y.length
var i = 0
while (i < limit) {
val res = (x(i) & 0xff) - (y(i) & 0xff)
if (res != 0) return res
i += 1
}
x.length - y.length
}
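  // Worked example (illustrative): bytes compare unsigned because of the
  // `& 0xff` masking, so compareBinary(Array(0xFF.toByte), Array(0x01)) > 0
  // (255 vs 1) even though 0xFF.toByte is -1 as a signed Byte; equal prefixes
  // fall through to the length comparison.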
/**
* Returns true if the equals method of the elements of the data type is implemented properly.
* This also means that they can be safely used in collections relying on the equals method,
* as sets or maps.
*/
def typeWithProperEquals(dataType: DataType): Boolean = dataType match {
case BinaryType => false
case _: AtomicType => true
case _ => false
}
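  // Illustrative note: BinaryType is excluded because it maps to Array[Byte],
  // and JVM arrays use reference equality -- Array[Byte](1) == Array[Byte](1)
  // is false -- making such values unsafe as hash-map keys or set elements.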
def failWithIntervalType(dataType: DataType): Unit = {
invokeOnceForInterval(dataType) {
throw QueryCompilationErrors.cannotUseIntervalTypeInTableSchemaError()
}
}
def invokeOnceForInterval(dataType: DataType)(f: => Unit): Unit = {
def isInterval(dataType: DataType): Boolean = dataType match {
case CalendarIntervalType | _: DayTimeIntervalType | _: YearMonthIntervalType => true
case _ => false
}
if (dataType.existsRecursively(isInterval)) f
}
}
| maropu/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala | Scala | apache-2.0 | 4,722 |
package quotes
import scala.quoted.*
object Quotes_1 {
def printHello(using Quotes): Expr[Unit] = '{ println("Hello") }
$printHello // error
}
| dotty-staging/dotty | tests/neg-macros/quote-spliceNonStaged.scala | Scala | apache-2.0 | 148 |
package org.scaladebugger.api.lowlevel.wrappers
import com.sun.jdi._
import org.scaladebugger.test.helpers.ParallelMockFunSpec
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers}
import test.JDIMockHelpers
import scala.collection.JavaConverters._
class ValueWrapperSpec extends ParallelMockFunSpec with JDIMockHelpers {
describe("ValueWrapper") {
describe("constructor") {
it("should throw an exception if wrapping a null pointer") {
intercept[IllegalArgumentException] {
new ValueWrapper(null)
}
}
}
describe("#isObject") {
it("should return true if the value wrapped is an object reference") {
val valueWrapper = new ValueWrapper(stub[ObjectReference])
valueWrapper.isObject should be (true)
}
it("should return false if the value wrapped is not an object reference") {
val valueWrapper = new ValueWrapper(stub[PrimitiveValue])
valueWrapper.isObject should be (false)
}
}
describe("#isPrimitive") {
it("should return true if the value wrapped is a primitive value") {
val valueWrapper = new ValueWrapper(stub[PrimitiveValue])
valueWrapper.isPrimitive should be (true)
}
it("should return false if the value wrapped is not a primitive value") {
val valueWrapper = new ValueWrapper(stub[ObjectReference])
valueWrapper.isPrimitive should be (false)
}
}
describe("#value") {
it("should invoke primitiveValue if wrapped value is a primitive") {
val expected = 3
val actual = new ValueWrapper(stub[PrimitiveValue]) {
override def primitiveValue(): AnyVal = expected
}.value()
actual should be (expected)
}
it("should invoke objectValue if wrapped value is an object") {
val expected = "string value"
val actual = new ValueWrapper(stub[ObjectReference]) {
override def objectValue(): AnyRef = expected
}.value()
actual should be (expected)
}
it("should throw an exception if wrapped value is not an object or primitive") {
intercept[Throwable] {
new ValueWrapper(stub[Value]).value()
}
}
}
describe("#valueAsOption") {
it("should return Some(value) if wrapped value is a primitive") {
val expected = Some(3)
val actual = new ValueWrapper(stub[PrimitiveValue]) {
override def primitiveValue(): AnyVal = expected.get
}.valueAsOption()
actual should be (expected)
}
it("should return Some(value) if wrapped value is an object") {
val expected = Some("string value")
val actual = new ValueWrapper(stub[ObjectReference]) {
override def objectValue(): AnyRef = expected.get
}.valueAsOption()
actual should be (expected)
}
it("should return None if wrapped value is not an object or primitive") {
val expected = None
val actual = new ValueWrapper(stub[Value]).valueAsOption()
actual should be (expected)
}
}
describe("#objectValue") {
it("should return the actual string if the value is a string reference") {
val expected = "some string"
val mockStringReference = mock[StringReference]
(mockStringReference.value _).expects().returning(expected).once()
val actual = new ValueWrapper(mockStringReference).objectValue()
actual should be (expected)
}
it("should return an array of values if the value is an array reference") {
val expected = List(mock[Value], mock[Value], mock[Value])
val mockArrayReference = mock[ArrayReference]
(mockArrayReference.getValues: Function0[java.util.List[Value]])
.expects().returning(expected.asJava).once()
val actual = new ValueWrapper(mockArrayReference).objectValue()
.asInstanceOf[java.util.List[Value]]
actual should contain theSameElementsAs expected
}
it("should return the #toString() of the value if there is no specific case") {
val mockObjectReference = mock[ObjectReference]
val expected = mockObjectReference.toString
val actual = new ValueWrapper(mockObjectReference).objectValue()
actual should be (expected)
}
it("should throw an exception if the value is not an object") {
intercept[IllegalArgumentException] {
new ValueWrapper(mock[PrimitiveValue]).objectValue()
}
}
}
describe("#objectValueAsOption") {
it("should return Some(value) if wrapped value has an object value") {
val expected = Some("string value")
val value = mock[StringReference]
(value.value _).expects().returning(expected.get).once()
val actual = new ValueWrapper(value).objectValueAsOption()
.map(_.asInstanceOf[String])
actual should be (expected)
}
it("should return None if wrapped value does not have an object value") {
val expected = None
val actual =
new ValueWrapper(stub[PrimitiveValue]).objectValueAsOption()
actual should be (expected)
}
}
describe("#primitiveValue") {
it("should return a boolean if wrapping a BooleanValue") {
val expected: Boolean = true
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Boolean]
actual should be (expected)
}
it("should return a byte if wrapping a ByteValue") {
val expected: Byte = 3.toByte
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Byte]
actual should be (expected)
}
it("should return a char if wrapping a CharValue") {
val expected: Char = 'a'
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Char]
actual should be (expected)
}
it("should return a double if wrapping a DoubleValue") {
val expected: Double = 3.0
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Double]
actual should be (expected)
}
it("should return a float if wrapping a FloatValue") {
val expected: Float = 3.0f
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Float]
actual should be (expected)
}
it("should return an integer if wrapping an IntegerValue") {
val expected: Int = 3
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Int]
actual should be (expected)
}
it("should return a long if wrapping a LongValue") {
val expected: Long = 3
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Long]
actual should be (expected)
}
it("should return a short if wrapping a ShortValue") {
val expected: Short = 3
val value = createPrimitiveValueStub(expected)
val actual = new ValueWrapper(value).primitiveValue().asInstanceOf[Short]
actual should be (expected)
}
it("should throw an exception if not a primitive") {
intercept[IllegalArgumentException] {
// TODO: Investigate creating a real value without needing a
// VirtualMachine instance
new ValueWrapper(stub[ObjectReference]).primitiveValue()
}
}
it("should throw an exception if given an unexpected primitive") {
intercept[Throwable] {
// TODO: Investigate creating a real value without needing a
// VirtualMachine instance
// Represents an unknown primitive value that we are not checking
new ValueWrapper(stub[PrimitiveValue]).primitiveValue()
}
}
}
describe("#primitiveValueAsOption") {
it("should return Some(value) if wrapped value has a primitive value") {
val expected = Some(true)
val value = createPrimitiveValueStub(expected.get)
val actual = new ValueWrapper(value).primitiveValueAsOption()
.map(_.asInstanceOf[Boolean])
actual should be (expected)
}
it("should return None if wrapped value does not have a primitive value") {
val expected = None
val actual =
new ValueWrapper(stub[ObjectReference]).primitiveValueAsOption()
actual should be (expected)
}
}
describe("#fieldsAndValues") {
it("should return all fields available on the object") {
val fieldsAndValues = Seq(
(stub[Field], stub[PrimitiveValue]),
(stub[Field], stub[ObjectReference])
)
val expected = fieldsAndValues.toMap
val value = createObjectReferenceStub(
fieldsAndValues = Some(fieldsAndValues))
val actual = new ValueWrapper(value).fieldsAndValues()
actual should be (expected)
}
it("should exclude the MODULE$ field (it is recursive)") {
// Create the field that should be excluded
val moduleField = createFieldStub("MODULE$")
val fieldsAndValues = Seq(
(stub[Field], stub[PrimitiveValue]),
(stub[Field], stub[ObjectReference]),
(moduleField, stub[Value])
)
// Expect all fields EXCEPT the module
val expected = fieldsAndValues.filterNot(_._1 eq moduleField).toMap
val value = createObjectReferenceStub(
fieldsAndValues = Some(fieldsAndValues))
val actual = new ValueWrapper(value).fieldsAndValues()
actual should be (expected)
}
it("should fill in any missing values with null") {
val fieldsAndValues = Seq(
(stub[Field], stub[PrimitiveValue]),
(stub[Field], stub[ObjectReference])
)
val fieldsWithNoValues = Seq(stub[Field])
val expected =
(fieldsAndValues ++ fieldsWithNoValues.map((_, null))).toMap
val value = createObjectReferenceStub(
fieldsAndValues = Some(fieldsAndValues),
fieldsWithNoValues = Some(fieldsWithNoValues)
)
val actual = new ValueWrapper(value).fieldsAndValues()
actual should be (expected)
}
it("should throw an exception if not an object reference") {
intercept[Throwable] {
        new ValueWrapper(stub[PrimitiveValue]).fieldsAndValues()
}
}
}
describe("#fieldsAndValuesAsOption") {
it("should return Some(...) if wrapped value is capable of fields") {
val fieldsAndValues = Seq(
(stub[Field], stub[PrimitiveValue]),
(stub[Field], stub[ObjectReference])
)
val expected = Some(fieldsAndValues.toMap)
val value = createObjectReferenceStub(
fieldsAndValues = Some(fieldsAndValues))
val actual = new ValueWrapper(value).fieldsAndValuesAsOption()
actual should be (expected)
}
it("should return None if wrapped value does not have fields") {
val expected = None
val actual =
new ValueWrapper(stub[PrimitiveValue]).fieldsAndValuesAsOption()
actual should be (expected)
}
}
describe("#toString") {
it("should print out the value as a string") {
val value = stub[Value]
val expected = value.toString
val actual = new ValueWrapper(value).toString
actual should be (expected)
}
it("should print out the field names and values if given a depth of 2") {
val fieldsAndValues = Seq(
(createFieldStub("one"), stub[PrimitiveValue]),
(createFieldStub("two"), stub[ObjectReference])
)
val value = createObjectReferenceStub(
fieldsAndValues = Some(fieldsAndValues))
val expected = value.toString + "\n" +
fieldsAndValues.map {
case (f,v) => "\t" + f.name() + ": " + v.toString
}.mkString("\n")
val actual = new ValueWrapper(value).toString(2)
actual should be (expected)
}
}
}
}
| chipsenkbeil/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/lowlevel/wrappers/ValueWrapperSpec.scala | Scala | apache-2.0 | 12,470 |
/*^
===========================================================================
Helios - FX
===========================================================================
Copyright (C) 2013-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.helios.fx.dialogs.about
import javafx.fxml.FXMLLoader
import info.gianlucacosta.helios.apps.AppInfo
import scalafx.scene.control.Alert.AlertType
import scalafx.scene.control.{Alert, ButtonBar, ButtonType}
/**
* Dialog showing the application's information.
*
* This class must be instantiated on the GUI thread.
*
* @param appInfo an AppInfo object - for example, an instance of AuroraAppInfo
*/
class AboutBox(appInfo: AppInfo) extends Alert(AlertType.None) {
private val loader: FXMLLoader =
new FXMLLoader(this.getClass.getResource("AboutBox.fxml"))
private val root: javafx.scene.layout.Pane =
loader.load[javafx.scene.layout.Pane]
private val controller: AboutBoxController =
loader.getController[AboutBoxController]
controller.setup(appInfo)
dialogPane().setContent(root)
buttonTypes = Seq(
new ButtonType("OK", ButtonBar.ButtonData.OKDone)
)
title = s"About ${appInfo.name}..."
}
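// Illustrative usage sketch (not part of the original file): `myAppInfo` is an
// assumed AppInfo instance, e.g. built via AuroraAppInfo as the scaladoc suggests.
// Must run on the JavaFX GUI thread:
//
//   val aboutBox = new AboutBox(myAppInfo)
//   aboutBox.showAndWait()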
| giancosta86/Helios-fx | src/main/scala/info/gianlucacosta/helios/fx/dialogs/about/AboutBox.scala | Scala | apache-2.0 | 1,888 |
package com.github.mgoeminne.sitar.parser.acm
import com.github.mgoeminne.sitar.parser.{Paper, Citation, CitationParser}
/**
 * ACM style for an inproceedings citation
*/
private[acm] class ACMInProceedingsParser extends CitationParser
{
def lastName: Parser[String] = """[^,]+""".r ^^ { case l => l.split(" ").last}
def firstName: Parser[String] = """(\p{Lu}\.\s?)+""".r
def author: Parser[String] = lastName ~ "," ~ firstName ^^ { case l~","~f => l}
def authors: Parser[Seq[String]] = rep(author ~ ",") ~ "and" ~ author ^^ { case a ~ "and" ~ c => a.map(_._1) :+ c} |
author ~ ", and" ~ author ^^ { case a ~ ", and" ~ b => Seq(a,b)} |
author ^^ { case a => Seq(a) }
def title: Parser[String] = """((?!\.\s).)*""".r
def rest: Parser[Any] = """.*""".r
def citation: Parser[Paper] = authors~title~"."~rest ^^ { case a~t~"."~r => new Paper(t, a, 42, "youhou") }
}
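// Illustrative usage sketch (not part of the original file, and only valid from code
// inside the `acm` package since the class is package-private). `parseAll` is assumed
// to be inherited from the Scala parser-combinator base of CitationParser, and the
// sample citation string is invented. Note that the volume (42) and venue ("youhou")
// in `citation` above are hard-coded placeholders.
//
//   val p = new ACMInProceedingsParser
//   val result = p.parseAll(p.citation,
//     "Doe, J., and Smith, A. A study of citation parsing. In Proc. of X (2015), 1-10.")
//   if (result.successful) println(result.get)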
| mgoeminne/sitar | src/main/scala/com/github/mgoeminne/sitar/parser/acm/ACMInProceedingsParser.scala | Scala | mit | 969 |
package com.themillhousegroup.play2.mailgun.templating
import org.specs2.mutable.Specification
class DualFormatEmailSpec extends Specification {
"Dual-Format emails" should {
"Convert plain text to plain text" in {
val dfe = new DualFormatEmail("test text")
dfe.toPlainText must beEqualTo("test text")
}
"Convert basic markup text to plain text" in {
val dfe = new DualFormatEmail("test <b>bold <i>bold-italic</i></b> <i>italic</i>")
dfe.toPlainText must beEqualTo("test bold bold-italic italic")
}
"Convert basic markup text to plain text, dropping elements that are tagged as such" in {
val dfe = new DualFormatEmail("test <b>bold <i>bold-italic</i></b> <h3 data-ignored-in-plain-text>I should be <i>ignored</i></h3> <i>italic</i>")
dfe.toPlainText must beEqualTo("test bold bold-italic italic")
}
}
}
| themillhousegroup/play2-mailgun | templating/src/test/scala/com/themillhousegroup/play2/mailgun/templating/DualFormatEmailSpec.scala | Scala | mit | 849 |
package model
import play.api.libs.json._
/**
* Represents the Swagger definition for GithubScmlinks.
* @param additionalProperties Any additional properties this model may have.
*/
@javax.annotation.Generated(value = Array("org.openapitools.codegen.languages.ScalaPlayFrameworkServerCodegen"), date = "2022-02-13T02:38:35.589632Z[Etc/UTC]")
case class GithubScmlinks(
self: Option[Link],
  `class`: Option[String],
  additionalProperties: JsObject
)
object GithubScmlinks {
implicit lazy val githubScmlinksJsonFormat: Format[GithubScmlinks] = {
val realJsonFormat = Json.format[GithubScmlinks]
val declaredPropNames = Set("self", "`class`")
Format(
Reads {
case JsObject(xs) =>
val declaredProps = xs.filterKeys(declaredPropNames)
val additionalProps = JsObject(xs -- declaredPropNames)
val restructuredProps = declaredProps + ("additionalProperties" -> additionalProps)
val newObj = JsObject(restructuredProps)
realJsonFormat.reads(newObj)
case _ =>
JsError("error.expected.jsobject")
},
Writes { githubScmlinks =>
val jsObj = realJsonFormat.writes(githubScmlinks)
val additionalProps = jsObj.value("additionalProperties").as[JsObject]
val declaredProps = jsObj - "additionalProperties"
val newObj = declaredProps ++ additionalProps
newObj
}
)
}
}
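// Illustrative round-trip sketch (not part of the generated file): any JSON keys
// other than "self" and "class" are folded into `additionalProperties` on read and
// flattened back out on write. The sample payload is invented.
//
//   val js = Json.parse("""{"class":"io.jenkins.Links","extra":1}""")
//   js.validate[GithubScmlinks].map(_.additionalProperties)  // JsObject containing "extra"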
| cliffano/swaggy-jenkins | clients/scala-play-server/generated/app/model/GithubScmlinks.scala | Scala | mit | 1,418 |
package com.gravity.hadoop
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{PathFilter, Path, FileSystem}
import org.apache.hadoop.io.{SequenceFile, Writable}
import java.io._
import scala.collection.mutable.Buffer
/**
* Convenience methods for reading and writing files to and from hdfs.
*/
package object hdfs {
implicit def asRichFileSystem(fs: FileSystem): RichFileSystem = new RichFileSystem(fs)
/**
* Gives you a file writer into the local cluster hdfs instance
* @param relpath The relative path
* @param recreateIfPresent If true, will delete the file if it already exists
* @param work A function that works with the output. The output will be closed when this function goes out of scope.
* @return
*/
def withHdfsWriter(fs: FileSystem, relpath: String, recreateIfPresent: Boolean = true)(work: (BufferedWriter) => Unit) {
val path = new Path(relpath)
val fileSystem = fs
if (recreateIfPresent) {
if (fileSystem.exists(path)) {
        fileSystem.delete(path, true)
}
}
val output = new BufferedWriter(new OutputStreamWriter(fileSystem.create(path)))
try {
work(output)
} finally {
output.close()
}
}
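  // Illustrative usage sketch (assumes an open FileSystem `fs`; the path is invented):
  //
  //   withHdfsWriter(fs, "/tmp/report.txt") { out =>
  //     out.write("hello hdfs")
  //     out.newLine()
  //   }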
def perPartSequenceFileKV[K <: Writable, V <: Writable](fs: FileSystem, relpath: String, conf: Configuration,fileBeginsWith:String="part-")(key: K, value: V)(line: (K, V) => Unit) {
val glob = new Path(relpath)
val files = fs.listStatus(glob, new PathFilter {
override def accept(path: Path): Boolean = path.getName.startsWith(fileBeginsWith)
})
for (file <- files) {
perSequenceFileKV(fs, file.getPath.toString, conf)(key, value)(line)
}
}
def perSequenceFileKV[K <: Writable, V <: Writable](fs: FileSystem, relpath: String, conf: Configuration)(key: K, value: V)(line: (K, V) => Unit) {
val reader = new SequenceFile.Reader(fs, new Path(relpath), conf)
try {
while (reader.next(key, value)) {
line(key, value)
}
} finally {
reader.close()
}
}
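  // Illustrative usage sketch (assumes Text key/value classes, an open FileSystem `fs`
  // and a Hadoop Configuration `conf`; the path is invented):
  //
  //   import org.apache.hadoop.io.Text
  //   perSequenceFileKV(fs, "/data/part-00000", conf)(new Text(), new Text()) {
  //     (k, v) => println(s"$k -> $v")
  //   }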
/**
* Allows you to work with a reader opened into an hdfs file on the test cluster.
* @param relpath The path to the file
* @param work The work you will do
* @tparam A If you want to return a value after the work, here it is.
* @return
*/
def withHdfsReader[A](fs: FileSystem, relpath: String)(work: (BufferedReader) => A): A = {
val path = new Path(relpath)
val input = new BufferedReader(new InputStreamReader(fs.open(path)))
try {
work(input)
} finally {
input.close()
}
}
def withHdfsDirectoryReader[A](fs: FileSystem, relpath: String)(work: (BufferedReader) => A): A = {
val path = new Path(relpath)
val input = new BufferedReader(new InputStreamReader(new RichFileSystem(fs).openParts(path)))
try {
work(input)
} finally {
input.close()
}
}
/**
* Reads a file into a buffer, allowing you to decide what's in the buffer depending on the output of the linereader function
* @param relpath Path to local hdfs buffer
* @param linereader Function to return an element in the buffer, given the line fo the file
* @tparam A
* @return
*/
def perHdfsLineToSeq[A](fs: FileSystem, relpath: String)(linereader: (String) => A): Seq[A] = {
val result = Buffer[A]()
withHdfsReader(fs, relpath) {
input =>
var done = false
while (!done) {
val line = input.readLine()
if (line == null) {
done = true
} else {
result += linereader(line)
}
}
}
result.toSeq
}
/**
* Reads a file line by line. If you want to have the results in a buffer, use perHdfsLineToSeq
* @param relpath
* @param linereader
* @tparam A
* @return
*/
def perHdfsLine[A](fs: FileSystem, relpath: String)(linereader: (String) => Unit) {
withHdfsReader(fs, relpath) {
input =>
var done = false
while (!done) {
val line = input.readLine()
if (line == null) {
done = true
} else {
linereader(line)
}
}
}
}
/**
* For each line in a directory of files
* @param relpath Path to files (or glob path)
* @param linereader Will be invoked once per line with a string representation
* @return Bupkiss
*/
def perHdfsDirectoryLine(fs: FileSystem, relpath: String)(linereader: (String) => Unit) {
withHdfsDirectoryReader(fs, relpath) {
input =>
var done = false
while (!done) {
val line = input.readLine()
if (line == null) {
done = true
} else {
linereader(line)
}
}
}
}
} | GravityLabs/HPaste | src/main/scala/com/gravity/hadoop/hdfs.scala | Scala | apache-2.0 | 4,766 |
/*
* ParParticleFilter.scala
* A parallel one-time particle filter.
*
* Created By: Lee Kellogg ([email protected])
* Creation Date: Jun 2, 2015
*
* Copyright 2015 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.filtering
import com.cra.figaro.language._
import scala.collection.parallel.ParSeq
import com.cra.figaro.algorithm.filtering.ParticleFilter.WeightedParticle
import com.cra.figaro.library.cache.PermanentCache
import com.cra.figaro.library.cache.Cache
import com.cra.figaro.algorithm.sampling.LikelihoodWeighter
/**
* A parallel one-time particle filter. Distributes the work of generating particles at each time step over a specified
* number of threads. After generating the particles, they are recombined before re-sampling occurs. Instead of accepting
* initial and static universes as input, this method accepts functions that return universes. This is because each
* thread needs its own set of universes to work on. It is important that any elements created within those functions are
* explicitly assigned to the returned universe, not the implicit default universe.
*
* @param static A function that returns a universe of elements whose values do not change over time
* @param initial A function that returns a universe describing the distribution over the initial state of the system
* @param transition The transition model describing how the current state of the system depends on the static and previous, respectively
* @param numParticles Number of particles to use at each time step
* @param numThreads The number of threads over which to distribute the work of generating the particles at each step
*/
class ParOneTimeParticleFilter(static: () => Universe, initial: () => Universe, transition: (Universe, Universe) => Universe, val numParticles: Int, numThreads: Int)
extends ParFiltering(transition) with ParticleFilter {
/** sequence of UniverseWindows -- one for each thread */
private var windows: Seq[UniverseWindow] = _
/** (start, end) indices for each thread to divide up numParticles */
private val indices = calculateIndices(numParticles, numThreads)
/** generate the initial UniverseWindows */
private def genInitialWindows(): Seq[UniverseWindow] = {
Seq.fill(numThreads)(new UniverseWindow(null, initial(), static()))
}
/** apply the transition function to each of a sequence of UniverseWindows */
private def advanceUniverseWindows(windows: Seq[UniverseWindow]): Seq[UniverseWindow] = {
windows map { w => advanceUniverse(w, transition) }
}
/**
* generate particles for each thread, in parallel, then recombine and return
*
* @param windows the UniverseWindows to sample from
* @param weightedParticleCreator a function that generates a WeightedParticle, given a UniverseWindow and an index
*/
private def genParticles(windows: Seq[(UniverseWindow, LikelihoodWeighter)], weightedParticleCreator: ((UniverseWindow, LikelihoodWeighter), Int) => WeightedParticle): Seq[WeightedParticle] = {
val parWindows = windows.par
val particles = parWindows zip indices flatMap { case(window, (start, end)) =>
(start to end) map { i => weightedParticleCreator(window, i) }
}
particles.seq
}
/** compute probability of evidence for the particles, and update the belief state (after re-sampling) */
private def doTimeStep(weightedParticles: Seq[WeightedParticle]) {
computeProbEvidence(weightedParticles)
updateBeliefState(weightedParticles)
}
def run(): Unit = {
windows = genInitialWindows()
val windowWithCaches = windows.map(w => (w, new LikelihoodWeighter(w.current, new PermanentCache(w.current))))
val particles = genParticles(windowWithCaches, (w, _) => initialWeightedParticle(w._1.static, w._1.current, w._2))
doTimeStep(particles)
}
def advanceTime(evidence: Seq[NamedEvidence[_]] = List()): Unit = {
val newWindows = advanceUniverseWindows(windows)
val newWindowsWithCaches = newWindows.map(w => (w, new LikelihoodWeighter(w.current, new PermanentCache(w.current))))
val particles = genParticles(newWindowsWithCaches, (w, i) => addWeightedParticle(evidence, i, w._1, w._2))
doTimeStep(particles)
windows = newWindows
}
/**
* Calculate start and end indices for each thread dividing up the particles
*/
private def calculateIndices(numParticles: Int, numThreads: Int): Seq[(Int, Int)] = {
val indices = (1 to numThreads) map { i =>
val start = (i - 1) * numParticles / numThreads
      val end = i * numParticles / numThreads - 1
(start, end)
}
indices
}
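  // Sanity sketch of the partitioning: calculateIndices(10, 3) yields
  // Seq((0, 2), (3, 5), (6, 9)), so the last thread absorbs the remainder.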
}
/**
* A parallel implementation of a OneTimeParticleFilter.
*/
object ParParticleFilter {
/**
* A parallel one-time particle filter. Distributes the work of generating particles at each time step over a specified
* number of threads. After generating the particles, they are recombined before re-sampling occurs. Instead of accepting
* initial and static universes as input, this method accepts functions that return universes. This is because each
* thread needs its own set of universes to work on. It is important that any elements created within those functions are
* explicitly assigned to the returned universe, not the implicit default universe.
*
*
* @param static A function that returns a universe of elements whose values do not change over time
* @param initial A function that returns a universe describing the distribution over the initial state of the system
* @param transition The transition model describing how the current state of the system depends on the static and previous, respectively
* @param numParticles Number of particles to use at each time step
* @param numThreads The number of threads over which to distribute the work of generating the particles at each step
*/
def apply(static: () => Universe, initial: () => Universe, transition: (Universe, Universe) => Universe, numParticles: Int, numThreads: Int): ParOneTimeParticleFilter =
new ParOneTimeParticleFilter(static, initial, transition, numParticles, numThreads)
/**
* A parallel one-time particle filter. Distributes the work of generating particles at each time step over a specified
* number of threads. After generating the particles, they are recombined before re-sampling occurs. Instead of accepting
* an initial universe as input, this method accepts a function that returns a universe. This is because each thread needs
* its own set of universes to work on. It is important that any elements created within that function are explicitly
* assigned to the returned universe, not the implicit default universe.
*
* @param initial A function that returns a universe describing the distribution over the initial state of the system
* @param transition The transition model describing how the current state of the system depends on the previous
* @param numParticles Number of particles to use at each time step
* @param numThreads The number of threads over which to distribute the work of generating the particles at each step
*/
def apply(initial: () => Universe, transition: Universe => Universe, numParticles: Int, numThreads: Int): ParOneTimeParticleFilter =
apply(() => new Universe(), initial, (static: Universe, previous: Universe) => transition(previous), numParticles, numThreads)
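  // Illustrative construction sketch (not part of the original file): `initialUniverse`
  // and `transitionOf` are assumed helpers that create all of their elements in the
  // universes they return, as the scaladoc requires.
  //
  //   val pf = ParParticleFilter(() => initialUniverse(), u => transitionOf(u),
  //     numParticles = 10000, numThreads = 4)
  //   pf.run()
  //   pf.advanceTime(List.empty)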
} | scottcb/figaro | Figaro/src/main/scala/com/cra/figaro/algorithm/filtering/ParParticleFilter.scala | Scala | bsd-3-clause | 7,588 |
import korolev._
import korolev.server._
import korolev.akka._
import scala.concurrent.ExecutionContext.Implicits.global
import korolev.state.javaSerialization._
import scala.concurrent.Future
object SimpleExample extends SimpleAkkaHttpKorolevApp {
import State.globalContext._
import levsha.dsl._
import html._
// Handler to input
val inputId = elementId()
val editInputId = elementId()
val service = akkaHttpService {
KorolevServiceConfig [Future, State, Any] (
stateLoader = StateLoader.default(State()),
document = state => optimize {
Html(
body(
div("Super TODO tracker"),
div(height @= "250px", overflow @= "scroll",
(state.todos zipWithIndex) map {
case (todo, i) =>
div(
input(
`type` := "checkbox",
when(state.edit.nonEmpty)(disabled),
when(todo.done)(checked),
// Generate transition when clicking checkboxes
event("click") { access =>
access.transition { s =>
val updated = s.todos.updated(i, s.todos(i).copy(done = !todo.done))
s.copy(todos = updated)
}
}
),
if (state.edit.contains(i)) {
form(
marginBottom @= "-10px",
display @= "inline-block",
input(
editInputId,
display @= "inline-block",
`type` := "text",
value := todo.text
),
button(display @= "inline-block", "Save"),
event("submit") { access =>
access.property(editInputId, "value") flatMap { value =>
access.transition { s =>
val updatedTodo = s.todos(i).copy(text = value)
val updatedTodos = s.todos.updated(i, updatedTodo)
s.copy(todos = updatedTodos, edit = None)
}
}
}
)
} else {
span(
when(todo.done)(textDecoration @= "line-through"),
todo.text,
event("dblclick") { access =>
access.transition(_.copy(edit = Some(i)))
}
)
}
)
}
),
form(
            // Generate AddTodo action when the 'Add todo' button is clicked
event("submit") { access =>
val prop = access.property(inputId)
prop.get("value") flatMap { value =>
prop.set("value", "") flatMap { _ =>
val todo = State.Todo(value, done = false)
access.transition(s => s.copy(todos = s.todos :+ todo))
}
}
},
input(
when(state.edit.nonEmpty)(disabled),
inputId,
`type` := "text",
placeholder := "What should be done?"
),
button(
when(state.edit.nonEmpty)(disabled),
"Add todo"
)
)
)
)
}
)
}
}
case class State(
todos: Vector[State.Todo] = (0 to 9).toVector.map(i => State.Todo(s"This is TODO #$i", done = false)),
edit: Option[Int] = None
)
object State {
val globalContext = Context[Future, State, Any]
case class Todo(text: String, done: Boolean)
}
| fomkin/korolev | examples/simple/src/main/scala/SimpleExample.scala | Scala | apache-2.0 | 3,918 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.tools.ingest
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import com.google.common.io.Files
import com.vividsolutions.jts.geom.Coordinate
import org.geotools.data.Transaction
import org.geotools.data.shapefile.ShapefileDataStoreFactory
import org.geotools.factory.Hints
import org.geotools.geometry.jts.JTSFactoryFinder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloRunner}
import org.locationtech.geomesa.index.stats.AttributeBounds
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class ShpIngestTest extends Specification {
sequential
"ShpIngest" >> {
val geomBuilder = JTSFactoryFinder.getGeometryFactory
val shpStoreFactory = new ShapefileDataStoreFactory
val shpFile = new File(Files.createTempDir(), "shpingest.shp")
val shpUrl = shpFile.toURI.toURL
val params = Map("url" -> shpUrl)
val shpStore = shpStoreFactory.createNewDataStore(params)
val schema = SimpleFeatureTypes.createType("shpingest", "age:Integer,dtg:Date,*geom:Point:srid=4326")
shpStore.createSchema(schema)
val df = new SimpleDateFormat("dd-MM-yyyy")
val (minDate, maxDate) = (df.parse("01-01-2011"), df.parse("01-01-2012"))
val (minX, maxX, minY, maxY) = (10.0, 20.0, 30.0, 40.0)
val data =
List(
("1", 1, minDate, (minX, minY)),
("1", 2, maxDate, (maxX, maxY))
)
val writer = shpStore.getFeatureWriterAppend("shpingest", Transaction.AUTO_COMMIT)
    data.foreach { case (id, age, dtg, (x, y)) =>
      val f = writer.next()
      f.setAttribute("age", age)
      f.setAttribute("dtg", dtg)
      val pt = geomBuilder.createPoint(new Coordinate(x, y))
f.setDefaultGeometry(pt)
f.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
f.getUserData.put(Hints.PROVIDED_FID, id)
writer.write()
}
writer.flush()
writer.close()
val args = Array[String]("ingest", "--zookeepers", "zoo", "--mock", "--instance", "mycloud", "--user", "myuser",
"--password", "mypassword", "--catalog", "testshpingestcatalog", shpFile.getAbsolutePath)
"should properly ingest a shapefile" >> {
val command = AccumuloRunner.parseCommand(args).asInstanceOf[AccumuloDataStoreCommand]
command.execute()
val fs = command.withDataStore(_.getFeatureSource("shpingest"))
fs.getFeatures.features().toList must haveLength(2)
val bounds = fs.getBounds
bounds.getMinX mustEqual minX
bounds.getMaxX mustEqual maxX
bounds.getMinY mustEqual minY
bounds.getMaxY mustEqual maxY
command.withDataStore { (ds) =>
ds.stats.getAttributeBounds[Date](ds.getSchema("shpingest"), "dtg") must
beSome(AttributeBounds(minDate, maxDate, 2))
}
}
"should support renaming the feature type" >> {
val newArgs = Array(args.head) ++ Array("--feature-name", "changed") ++ args.tail
val command = AccumuloRunner.parseCommand(newArgs).asInstanceOf[AccumuloDataStoreCommand]
command.execute()
val fs = command.withDataStore(_.getFeatureSource("changed"))
fs.getFeatures.features().toList must haveLength(2)
val bounds = fs.getBounds
bounds.getMinX mustEqual minX
bounds.getMaxX mustEqual maxX
bounds.getMinY mustEqual minY
bounds.getMaxY mustEqual maxY
command.withDataStore { (ds) =>
ds.stats.getAttributeBounds[Date](ds.getSchema("changed"), "dtg") must
beSome(AttributeBounds(minDate, maxDate, 2))
}
}
}
}
| MutahirKazmi/geomesa | geomesa-accumulo/geomesa-accumulo-tools/src/test/scala/org/locationtech/geomesa/accumulo/tools/ingest/ShpIngestTest.scala | Scala | apache-2.0 | 4,310 |
/*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package turbo.crawler.power
import turbo.crawler.Fetchable
/**
* Message Driven
* @author mclaren
*
*/
trait MessageDriven {
def fireEvent(evt: Evt): Unit = EventManager.fireEvent(evt)
}
class Evt(evt_id: String, _source: Fetchable) {
def eventId = evt_id
def source = _source
override def toString = "Evt:[" + evt_id + "] on [" + source.getDirectUrl + "]"
}
class EventIDDescriptor(_id: String) {
def mkComplted = _id + "_COMPLETION"
}
object eventId {
def apply(id: String): EventIDDescriptor = new EventIDDescriptor(id)
def apply(id: Int): EventIDDescriptor = this(id + "")
}
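// Illustrative sketch (not part of the original file): a crawler component mixing in
// MessageDriven might signal completion of stage 42 for a fetched page like so
// (`page` is an assumed Fetchable):
//
//   fireEvent(new Evt(eventId(42).mkComplted, page))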
| fengshao0907/Mycat-spider | src/main/scala/turbo/crawler/power/MessageDriven.scala | Scala | apache-2.0 | 1,755 |
/**
* Copyright (C) 2016 Verizon. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.verizon.bda.trapezium.cache.exception
/**
* Created by v468328 on 11/22/16.
*/
class CacheConfig {
}
| Verizon/trapezium | cache/src/main/scala/com/verizon/bda/trapezium/cache/exception/CacheConfig.scala | Scala | apache-2.0 | 721 |
package org.jetbrains.plugins.scala.lang.refactoring.util
import com.intellij.openapi.project.Project
import com.intellij.psi.search.{GlobalSearchScopesCore, PsiSearchHelper}
import com.intellij.psi.{PsiDirectory, PsiElement, PsiFile, PsiNamedElement}
import com.intellij.util.Processor
import scala.collection.mutable.ArrayBuffer
/**
* Created by Kate Ustyuzhanina on 8/25/15.
*/
object ScalaCompositeTypeValidator {
def apply(validators: List[ScalaValidator],
conflictsReporter: ConflictsReporter,
myProject: Project,
selectedElement: PsiElement,
noOccurrences: Boolean,
enclosingContainerAll: PsiElement,
enclosingOne: PsiElement): ScalaCompositeTypeValidator = {
new ScalaCompositeTypeValidator(conflictsReporter, myProject,
selectedElement, noOccurrences, enclosingContainerAll, enclosingOne, validators)
}
}
class ScalaCompositeTypeValidator(conflictsReporter: ConflictsReporter,
myProject: Project,
selectedElement: PsiElement,
noOccurrences: Boolean,
enclosingContainerAll: PsiElement,
enclosingOne: PsiElement,
validators: List[ScalaValidator])
extends ScalaTypeValidator(selectedElement, noOccurrences, enclosingContainerAll, enclosingOne) {
protected override def findConflictsImpl(name: String, allOcc: Boolean): Seq[(PsiNamedElement, String)] = {
//returns declaration and message
val buf = new ArrayBuffer[(PsiNamedElement, String)]
val filesToSearchIn = enclosingContainerAll match {
case directory: PsiDirectory =>
findFilesForDownConflictFindings(directory, name)
case _ => Seq.empty
}
for (file <- filesToSearchIn) {
if (buf.isEmpty) {
buf ++= forbiddenNamesInBlock(file, name)
}
}
for (validator <- validators) {
if (buf.isEmpty) {
buf ++= forbiddenNames(validator.enclosingContainer(allOcc), name)
}
}
buf
}
  //TODO: eliminate duplication
private def findFilesForDownConflictFindings(directory: PsiDirectory, name: String): Seq[PsiFile] = {
val buffer = new ArrayBuffer[PsiFile]()
val processor = new Processor[PsiFile] {
override def process(file: PsiFile): Boolean = {
buffer += file
true
}
}
val helper: PsiSearchHelper = PsiSearchHelper.SERVICE.getInstance(directory.getProject)
helper.processAllFilesWithWord(name, GlobalSearchScopesCore.directoryScope(directory, true), processor, true)
buffer
}
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaCompositeTypeValidator.scala | Scala | apache-2.0 | 2,692 |
package scroll.internal.formal
/** Companion object for the formal representation of the constraint model.
*/
object FormalConstraintModel {
def empty[NT >: Null <: AnyRef, RT >: Null <: AnyRef, CT >: Null <: AnyRef, RST >: Null <: AnyRef]
: FormalConstraintModel[NT, RT, CT, RST] =
FormalConstraintModel[NT, RT, CT, RST](Map.empty, Map.empty, List.empty)
/** Little helper factory method for creating a constraint model with Strings only.
*/
def forStrings(
rolec: Map[String, List[((Int, Int), AnyRef)]],
card: Map[String, ((Int, Int), (Int, Int))],
intra: List[(String, List[(String, String)] => Boolean)]
): FormalConstraintModel[String, String, String, String] =
FormalConstraintModel(rolec, card, intra)
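  // Illustrative sketch (invented names): a model with one role constraint on the
  // "Insurance" compartment (between 1 and 2 "Driver" roles filled), one cardinality
  // bound on the "drives" relationship, and no intra-relationship constraints.
  //
  //   val cm = FormalConstraintModel.forStrings(
  //     rolec = Map("Insurance" -> List(((1, 2), "Driver"))),
  //     card = Map("drives" -> ((1, 1), (0, 5))),
  //     intra = List.empty)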
}
/** Class representation of the Constraint Model.
*
* @param rolec
* the role constraints
* @param card
* cardinality mappings
* @param intra
* intra-relationship constraints
* @tparam NT
* type of naturals
* @tparam RT
* type of roles
* @tparam CT
* type of compartments
* @tparam RST
* type of relationships
*/
final case class FormalConstraintModel[
NT >: Null <: AnyRef,
RT >: Null <: AnyRef,
CT >: Null <: AnyRef,
RST >: Null <: AnyRef
](
rolec: Map[CT, List[((Int, Int), AnyRef)]],
card: Map[RST, ((Int, Int), (Int, Int))],
intra: List[(RST, List[(NT, NT)] => Boolean)]
) {
/** @param crom
* the CROM instance to check against
* @return
* true iff the constraint model is compliant to the given CROM.
*/
def compliant(crom: FormalCROM[NT, RT, CT, RST]): Boolean = crom.wellformed && axiom12(crom)
def axiom12(crom: FormalCROM[NT, RT, CT, RST]): Boolean =
FormalUtils.all(for {
ct1 <- crom.ct if rolec.contains(ct1)
(_, a) <- rolec(ct1)
} yield FormalUtils.atoms(a).toSet.subsetOf(crom.parts(ct1).toSet))
/** @param crom
* the CROM instance to check against
* @param croi
* the CROI instance to check against
* @return
* true iff the constraint model is compliant to the given CROM and the given CROI is valid
* wrt. the constraint model
*/
def validity(crom: FormalCROM[NT, RT, CT, RST], croi: FormalCROI[NT, RT, CT, RST]): Boolean =
compliant(crom) && croi.compliant(crom) && axiom13(crom, croi) && axiom14(croi) && axiom15(
crom,
croi
) && axiom16(croi)
def axiom13(crom: FormalCROM[NT, RT, CT, RST], croi: FormalCROI[NT, RT, CT, RST]): Boolean =
FormalUtils.all(for {
ct1 <- crom.ct if rolec.contains(ct1)
(crd, a) <- rolec(ct1)
c1 <- croi.c if croi.type1(c1) == ct1
} yield {
val sum = croi.o_c(c1).map(FormalUtils.evaluate(a, croi, _, c1)).sum
crd._1 <= sum && sum <= crd._2
})
def axiom14(croi: FormalCROI[NT, RT, CT, RST]): Boolean =
FormalUtils.all(for {
(o, c, r) <- croi.plays if rolec.contains(croi.type1(c).asInstanceOf[CT])
(_, a) <- rolec(croi.type1(c).asInstanceOf[CT])
if FormalUtils.atoms(a).contains(croi.type1(r))
} yield FormalUtils.evaluate(a, croi, o, c) == 1)
def axiom15(crom: FormalCROM[NT, RT, CT, RST], croi: FormalCROI[NT, RT, CT, RST]): Boolean =
FormalUtils.all(for {
rst <- crom.rst if card.contains(rst)
c <- croi.c if croi.links.contains((rst, c))
(r_1, r_2) <- croi.links((rst, c))
} yield {
val l1 = croi.pred(rst, c, r_2).size
val l2 = croi.succ(rst, c, r_1).size
card(rst)._1._1 <= l1 && l1 <= card(rst)._1._2 && card(rst)._2._1 <= l2 && l2 <= card(
rst
)._2._2
})
def axiom16(croi: FormalCROI[NT, RT, CT, RST]): Boolean =
FormalUtils.all(for {
c <- croi.c
(rst, f) <- intra if croi.links.contains((rst, c))
} yield f(croi.overline_links(rst, c)))
}
| max-leuthaeuser/SCROLL | core/src/main/scala/scroll/internal/formal/FormalConstraintModel.scala | Scala | lgpl-3.0 | 3,820 |
package mesosphere.marathon.integration.setup
import java.io.File
import java.util.concurrent.{ Executors, TimeUnit }
import com.google.common.util.concurrent.{ AbstractIdleService, Service }
import com.google.inject.Guice
import mesosphere.chaos.http.{ HttpConf, HttpModule, HttpService }
import mesosphere.chaos.metrics.MetricsModule
import org.apache.commons.io.FileUtils
import org.apache.log4j.Logger
import org.rogach.scallop.ScallopConf
import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import scala.sys.ShutdownHookThread
import scala.sys.process._
import scala.util.control.NonFatal
import scala.util.{ Failure, Success, Try }
/**
* Book Keeper for processes and services.
* During integration tests, several services and processes have to be launched.
* The ProcessKeeper knows about them and can handle their lifecycle.
*/
object ProcessKeeper {
private[this] val log = Logger.getLogger(getClass.getName)
private[this] var processes = List.empty[Process]
private[this] var services = List.empty[Service]
private[this] val ENV_MESOS_WORK_DIR: String = "MESOS_WORK_DIR"
def startHttpService(port: Int, assetPath: String) = {
startService {
log.info(s"Start Http Service on port $port")
val conf = new ScallopConf(Array("--http_port", port.toString, "--assets_path", assetPath)) with HttpConf
conf.afterInit()
val injector = Guice.createInjector(new MetricsModule, new HttpModule(conf), new IntegrationTestModule)
injector.getInstance(classOf[HttpService])
}
}
def startZooKeeper(port: Int, workDir: String) {
val args = "org.apache.zookeeper.server.ZooKeeperServerMain" :: port.toString :: workDir :: Nil
val workDirFile = new File(workDir)
FileUtils.deleteDirectory(workDirFile)
FileUtils.forceMkdir(workDirFile)
startJavaProcess("zookeeper", heapInMegs = 256, args, new File("."), sys.env, _.contains("binding to port"))
}
def startMesosLocal(): Process = {
val mesosWorkDirForMesos: String = "/tmp/marathon-itest-mesos"
val mesosWorkDirFile: File = new File(mesosWorkDirForMesos)
FileUtils.deleteDirectory(mesosWorkDirFile)
FileUtils.forceMkdir(mesosWorkDirFile)
startProcess(
"mesos",
Process(Seq("mesos-local", "--ip=127.0.0.1"), cwd = None, ENV_MESOS_WORK_DIR -> mesosWorkDirForMesos),
upWhen = _.toLowerCase.contains("registered with master"))
}
def startMarathon(cwd: File, env: Map[String, String], arguments: List[String],
mainClass: String = "mesosphere.marathon.Main",
startupLine: String = "Started SelectChannelConnector"): Process = {
val argsWithMain = mainClass :: arguments
val mesosWorkDir: String = "/tmp/marathon-itest-marathon"
val mesosWorkDirFile: File = new File(mesosWorkDir)
FileUtils.deleteDirectory(mesosWorkDirFile)
FileUtils.forceMkdir(mesosWorkDirFile)
startJavaProcess(
"marathon", heapInMegs = 512, argsWithMain, cwd,
env + (ENV_MESOS_WORK_DIR -> mesosWorkDir),
upWhen = _.contains(startupLine))
}
def startJavaProcess(name: String, heapInMegs: Int, arguments: List[String],
cwd: File = new File("."), env: Map[String, String] = Map.empty, upWhen: String => Boolean): Process = {
log.info(s"Start java process $name with args: $arguments")
val javaExecutable = sys.props.get("java.home").fold("java")(_ + "/bin/java")
val classPath = sys.props.getOrElse("java.class.path", "target/classes")
val memSettings = s"-Xmx${heapInMegs}m"
val builder = Process(javaExecutable :: memSettings :: "-classpath" :: classPath :: arguments, cwd, env.toList: _*)
val process = startProcess(name, builder, upWhen)
log.info(s"Java process $name up and running!")
process
}
def startProcess(name: String, processBuilder: ProcessBuilder, upWhen: String => Boolean, timeout: Duration = 30.seconds): Process = {
sealed trait ProcessState
case object ProcessIsUp extends ProcessState
case object ProcessExited extends ProcessState
val up = Promise[ProcessIsUp.type]()
val logger = new ProcessLogger {
def checkUp(out: String) = {
log.info(s"$name: $out")
if (!up.isCompleted && upWhen(out)) up.trySuccess(ProcessIsUp)
}
override def buffer[T](f: => T): T = f
override def out(s: => String) = checkUp(s)
override def err(s: => String) = checkUp(s)
}
val process = processBuilder.run(logger)
val processExitCode: Future[ProcessExited.type] = Future {
val exitCode = scala.concurrent.blocking {
process.exitValue()
}
log.info(s"Process $name finished with exit code $exitCode")
// Sometimes this finishes before the other future finishes parsing the output
// and we incorrectly report ProcessExited instead of ProcessIsUp as the result of upOrExited.
Await.result(up.future, 1.second)
ProcessExited
}(ExecutionContext.fromExecutor(Executors.newCachedThreadPool()))
val upOrExited = Future.firstCompletedOf(Seq(up.future, processExitCode))(ExecutionContext.global)
Try(Await.result(upOrExited, timeout)) match {
case Success(result) =>
processes = process :: processes
result match {
case ProcessExited =>
throw new IllegalStateException(s"Process $name exited before coming up. Give up. $processBuilder")
case ProcessIsUp => log.info(s"Process $name is up and running. ${processes.size} processes in total.")
}
case Failure(_) =>
process.destroy()
throw new IllegalStateException(
s"Process $name does not came up within time bounds ($timeout). Give up. $processBuilder")
}
process
}
def onStopServices(block: => Unit): Unit = {
services ::= new AbstractIdleService {
override def shutDown(): Unit = {
block
}
override def startUp(): Unit = {}
}
}
def stopOSProcesses(grep: String): Unit = {
    val PIDRE = """\s*(\d+)\s.*""".r
    val processes = ("ps -x" #| s"grep $grep").!!.split("\n").map { case PIDRE(pid) => pid }
processes.foreach(p => s"kill -9 $p".!)
}
def stopAllProcesses(): Unit = {
def waitForProcessesToFinish(): Unit = {
processes.foreach(p => Try(p.destroy()))
// Unfortunately, there seem to be race conditions in Process.exitValue.
// Thus this ugly workaround.
val waitForExitInThread = new Thread() {
override def run(): Unit = {
processes.foreach(_.exitValue())
}
}
waitForExitInThread.start()
try {
waitForExitInThread.join(1000)
}
finally {
waitForExitInThread.interrupt()
}
}
try waitForProcessesToFinish()
catch {
case NonFatal(e) =>
log.error("while waiting for processes to finish", e)
try waitForProcessesToFinish()
catch {
case NonFatal(e) =>
log.error("giving up waiting for processes to finish", e)
}
}
processes = Nil
}
def startService(service: Service): Unit = {
services ::= service
service.startAsync().awaitRunning()
}
def stopAllServices(): Unit = {
services.foreach(_.stopAsync())
services.par.foreach(_.awaitTerminated(5, TimeUnit.SECONDS))
services = Nil
}
def shutdown(): Unit = {
stopAllProcesses()
stopAllServices()
}
val shutDownHook: ShutdownHookThread = sys.addShutdownHook {
shutdown()
}
def main(args: Array[String]) {
//startMarathon(new File("."), Map("MESOS_NATIVE_LIBRARY" -> "/usr/local/lib/libmesos.dylib"), List("--master", "local", "--event_subscriber", "http_callback"))
startZooKeeper(2183, "/tmp/foo")
Thread.sleep(10000)
stopAllProcesses()
//startHttpService(11211, ".")
}
}
| ramitsurana/marathon | src/test/scala/mesosphere/marathon/integration/setup/ProcessKeeper.scala | Scala | apache-2.0 | 7,865 |
package config.fixtures
import com.samskivert.mustache.Mustache
import org.peelframework.core.beans.system.Lifespan
import org.peelframework.flink.beans.system.Flink
import org.peelframework.hadoop.beans.system.HDFS2
import org.peelframework.spark.beans.system.Spark
import org.springframework.context.annotation.{Bean, Configuration}
import org.springframework.context.{ApplicationContext, ApplicationContextAware}
/** System beans for the 'peel-wordcount' bundle. */
@Configuration
class systems extends ApplicationContextAware {
/* The enclosing application context. */
var ctx: ApplicationContext = null
def setApplicationContext(ctx: ApplicationContext): Unit = {
this.ctx = ctx
}
// ---------------------------------------------------
// Systems
// ---------------------------------------------------
@Bean(name = Array("flink-1.0.3"))
def `flink-1.0.3`: Flink = new Flink(
version = "1.0.3",
configKey = "flink",
lifespan = Lifespan.EXPERIMENT,
dependencies = Set(ctx.getBean("hdfs-2.7.1", classOf[HDFS2])),
mc = ctx.getBean(classOf[Mustache.Compiler])
)
@Bean(name = Array("spark-1.6.0"))
def `spark-1.6.0`: Spark = new Spark(
version = "1.6.0",
configKey = "spark",
lifespan = Lifespan.EXPERIMENT,
dependencies = Set(ctx.getBean("hdfs-2.7.1", classOf[HDFS2])),
mc = ctx.getBean(classOf[Mustache.Compiler])
)
} | peelframework/peel-wordcount | peel-wordcount-bundle/src/main/resources/config/fixtures/systems.scala | Scala | apache-2.0 | 1,439 |
package domain
import akka.persistence.PersistentActor
import akka.actor.ActorLogging
import utils.akka.ActorHelpers
import java.security.MessageDigest
import akka.util.ByteString
object KeyRingAggregate {
type KeyBlock = String
trait Command extends BaseCommand
case class ReceiveKeySubmission(pgpPublicKeyBlock: KeyBlock) extends Command
case object GetKeys extends Command
trait Response extends BaseResponse
trait ReceiveKeySubmissionResponse extends Response
case class KeySubmissionAccepted(currentKeyBlock: String) extends Response
case class KeySubmissionRejected(reason: String) extends Response
trait GetKeysResponse extends Response
case object NoKeysAvailable extends GetKeysResponse
case class CurrentKeyBlock(pgpPublicKeyBlock: KeyBlock) extends GetKeysResponse
}
class KeyRingAggregate extends PersistentActor
with ActorLogging with ActorHelpers {
import KeyRingAggregate._
import domain.keyring._
import BaseDomainEvent.now
import dit4c.common.KeyHelpers._
import scala.language.implicitConversions
lazy val primaryKeyId = self.path.name
override lazy val persistenceId: String = "KeyRing-" + self.path.name
var currentKeyBlock: Option[String] = None
var seenKeyBlocks = Set.empty[ByteString]
override val receiveCommand = sealedReceive[Command] {
case GetKeys =>
sender ! currentKeyBlock.map(CurrentKeyBlock(_)).getOrElse(NoKeysAvailable)
case ReceiveKeySubmission(pgpPublicKeyBlock) if alreadyReceived(pgpPublicKeyBlock) =>
sender ! KeySubmissionAccepted(currentKeyBlock.get)
case ReceiveKeySubmission(pgpPublicKeyBlock) =>
combineWithCurrent(pgpPublicKeyBlock) match {
case Left(reason) =>
sender ! KeySubmissionRejected(reason)
case Right(updatedKeyBlock) =>
persist(AcceptedKeyBlockSubmission(pgpPublicKeyBlock, now)) { evt =>
updateState(evt)
sender ! KeySubmissionAccepted(currentKeyBlock.get)
}
}
}
override val receiveRecover = sealedReceive[DomainEvent](updateState _)
protected def updateState(evt: DomainEvent): Unit = evt match {
case AcceptedKeyBlockSubmission(keyBlock, _) =>
// Update key block, skipping blocks that merge with errors
combineWithCurrent(keyBlock).right.foreach { kb =>
currentKeyBlock = Some(kb)
recordReceived(kb)
}
}
protected def combineWithCurrent(newKeyBlock: String): Either[String, KeyBlock] =
parseArmoredPublicKeyRing(newKeyBlock) match {
case Left(reason) =>
Left(reason)
case Right(pkr) if pkr.getPublicKey.fingerprint.string != primaryKeyId =>
Left("Primary key fingerprint does not match persistent entity ID")
case Right(pkr) =>
// Obviously this needs improvement!
// TODO: do proper merge
Right(newKeyBlock)
}
def recordReceived(kb: String): Unit = {
seenKeyBlocks += keyBlockDigest(kb)
}
def alreadyReceived(kb: String): Boolean =
seenKeyBlocks.contains(keyBlockDigest(kb))
def keyBlockDigest(keyBlock: String): ByteString =
ByteString(MessageDigest.getInstance("SHA-512").digest(keyBlock.getBytes))
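  // Illustrative interaction sketch (assumes an ActorSystem with persistence
  // configured; `keyRing` is an assumed ActorRef whose actor name is the primary
  // key fingerprint, and `armoredKeyBlock` an ASCII-armored public key block):
  //
  //   keyRing ! KeyRingAggregate.ReceiveKeySubmission(armoredKeyBlock)
  //   keyRing ! KeyRingAggregate.GetKeys  // replies CurrentKeyBlock(...) or NoKeysAvailable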
} | dit4c/dit4c | dit4c-portal/app/domain/KeyRingAggregate.scala | Scala | mit | 3,197 |
trait tPointState {//5
type p <: tPointState
def x: Int
def y: Int
def of(x:Int,y:Int):p
}
trait tPointSum extends tPointState {//3*4
def sum(that:p)=
this.of(this.x+that.x,this.y+that.y)
}
trait tPointSub extends tPointState {
def sub(that:p)=
this.of(this.x-that.x,this.y-that.y)
}
trait tPointMul extends tPointState {
def mul(that:p)=
this.of(this.x*that.x,this.y*that.y)
}
trait tPointDiv extends tPointState {
def div(that:p)=
this.of(this.x/that.x,this.y/that.y)
}
//glue code from now on
class Point0(val x:Int, val y:Int) extends tPointState {//4*16
override type p = Point0
override def of(x:Int,y:Int)=
new Point0(x,y)//3 duplication of "this name"
}
class PointSum(val x:Int, val y:Int) extends tPointSum {
override type p = PointSum
override def of(x:Int,y:Int)=
new PointSum(x,y)
}
class PointSub(val x:Int, val y:Int) extends tPointSub {
override type p = PointSub
override def of(x:Int,y:Int)=
new PointSub(x,y)
}
class PointMul(val x:Int, val y:Int) extends tPointMul {
override type p = PointMul
override def of(x:Int,y:Int)=
new PointMul(x,y)
}
class PointDiv(val x:Int, val y:Int) extends tPointDiv {
override type p = PointDiv
override def of(x:Int,y:Int)=
new PointDiv(x,y)
}
class PointSumSub(val x:Int, val y:Int) extends tPointSum with tPointSub {
override type p = PointSumSub
override def of(x:Int,y:Int)=
new PointSumSub(x,y)
}
class PointSumMul(val x:Int, val y:Int) extends tPointSum with tPointMul {
override type p = PointSumMul
override def of(x:Int,y:Int)=
new PointSumMul(x,y)
}
class PointSumDiv(val x:Int, val y:Int) extends tPointSum with tPointDiv {
override type p = PointSumDiv
override def of(x:Int,y:Int)=
new PointSumDiv(x,y)
}
class PointSubMul(val x:Int, val y:Int) extends tPointSub with tPointMul {
override type p = PointSubMul
override def of(x:Int,y:Int)=
new PointSubMul(x,y)
}
class PointSubDiv(val x:Int, val y:Int) extends tPointSub with tPointDiv {
override type p = PointSubDiv
override def of(x:Int,y:Int)=
new PointSubDiv(x,y)
}
class PointMulDiv(val x:Int, val y:Int) extends tPointMul with tPointDiv {
override type p = PointMulDiv
override def of(x:Int,y:Int)=
new PointMulDiv(x,y)
}
class PointSumSubDiv(val x:Int, val y:Int) extends tPointSum with tPointSub with tPointDiv {
override type p = PointSumSubDiv
override def of(x:Int,y:Int)=
new PointSumSubDiv(x,y)
}
class PointSumSubMul(val x:Int, val y:Int) extends tPointSum with tPointSub with tPointMul {
override type p = PointSumSubMul
override def of(x:Int,y:Int)=
new PointSumSubMul(x,y)
}
class PointSumMulDiv(val x:Int, val y:Int) extends tPointSum with tPointMul with tPointDiv {
override type p = PointSumMulDiv
override def of(x:Int,y:Int)=
new PointSumMulDiv(x,y)
}
class PointSubMulDiv(val x:Int, val y:Int) extends tPointSub with tPointMul with tPointDiv {
override type p = PointSubMulDiv
override def of(x:Int,y:Int)=
new PointSubMulDiv(x,y)
}
class PointSumSubMulDiv(val x:Int, val y:Int) extends tPointSum with tPointSub with tPointMul with tPointDiv {
override type p = PointSumSubMulDiv
override def of(x:Int,y:Int)=
new PointSumSubMulDiv(x,y)
}
//main, not counted for the line numbers
object Test5 extends App {
val pp=new Point0(2,4)
val pp2=new PointSum(2,4)
val pp3=pp2.sum(pp2)
val ppAll=new PointSumSubMulDiv(3,4)
val ppAll2=ppAll.sum(ppAll.mul(ppAll))
println("Hi")
println(ppAll2.x+ppAll2.y)
} | ElvisResearchGroup/L42Docs | CaseStudyEcoop2018/src/scalaEP/mainPoints.scala | Scala | lgpl-3.0 | 3,503 |
package mesosphere.marathon.api.v2
import javax.inject.{ Inject }
import javax.ws.rs._
import javax.ws.rs.core.{ MediaType }
import mesosphere.marathon.MarathonConf
import com.codahale.metrics.annotation.Timed
import mesosphere.marathon.api.{ MarathonMediaType, RestResource }
import java.io.InputStream
@Path("v2/schemas")
@Consumes(Array(MediaType.APPLICATION_JSON))
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
class SchemaResource @Inject() (
val config: MarathonConf) extends RestResource {
@GET
@Timed
@Path("/app")
def index(): InputStream = {
getClass().getResourceAsStream("AppDefinition.json")
}
}
| spacejam/marathon | src/main/scala/mesosphere/marathon/api/v2/SchemaResource.scala | Scala | apache-2.0 | 647 |
package cobase.user
import com.mohiva.play.silhouette.api.services.IdentityService
import com.mohiva.play.silhouette.impl.providers.CommonSocialProfile
import scala.concurrent.Future
/**
* Handles actions to users.
*/
trait UserService extends IdentityService[User] {
def save(user: User): Future[User]
/**
* Saves the social profile for a user.
*
* If a user exists for this profile then update the user, otherwise create a new user with the given profile.
*
* @param profile The social profile to save.
* @return The user for whom the profile was saved.
*/
def save(profile: CommonSocialProfile): Future[User]
}
| Cobase/cobase-pro | app/cobase/user/UserService.scala | Scala | mit | 648 |
package utils
import scopt.OParser
object CommandLineParser {
def apply[C](parser: OParser[Unit, C], args: Array[String], config: C): C = {
OParser.parse(parser, args, config) match {
      case Some(parsedArgs) => parsedArgs
case _ => throw new IllegalArgumentException("Wrong command line arguments passed")
}
}
}
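// Illustrative usage sketch (invented Config case class and option; scopt 4 builder API):
//
//   case class Config(input: String = "")
//   val builder = OParser.builder[Config]
//   val parser = {
//     import builder._
//     OParser.sequence(
//       programName("lift"),
//       opt[String]("input").action((x, c) => c.copy(input = x)))
//   }
//   val cfg = CommandLineParser(parser, args, Config())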
| lift-project/lift | src/main/utils/CommandLineParser.scala | Scala | mit | 352 |
package model
import com.stormpath.sdk.account.Account
import com.stormpath.sdk.group.Group
import util.MarshallableImplicits._
import scala.collection.JavaConversions._
/**
* Created by vasmith on 12/11/16.
*/
object AccountFields {
def toJson(stormAccount: Account) = AccountFields(stormAccount.getHref, stormAccount.getUsername, stormAccount.getEmail, stormAccount.getGivenName,
stormAccount.getMiddleName, stormAccount.getSurname, stormAccount.getStatus.toString, stormAccount.getCreatedAt.toString,
stormAccount.getModifiedAt.toString, stormAccount.getProviderData.getModifiedAt.toString, GroupItems(findGroups(stormAccount))).toJson
def findGroups(stormAccount: Account): Seq[GroupFields] = stormAccount.getGroups.map(mapGroup).toSeq
private def mapGroup(g: Group): GroupFields = GroupFields(g.getName, g.getDescription, g.getStatus.toString, g.getHref)
}
case class AccountFields(href: String, username: String, email: String, givenName:String, middleName:String, surname:String, status:String,
createdAt: String, modifiedAt:String, passwordModifiedAt:String, groups: GroupItems)
case class GroupItems(items: Seq[GroupFields])
case class GroupFields(name: String, description: String, status: String, href: String) | ValSmith/play-stormpath | app/model/AccountFields.scala | Scala | mit | 1,271 |
package com.github.lookout.serviceartifact
import com.twitter.finagle.{Http, Service}
import com.twitter.io.Charsets
import com.twitter.server.TwitterServer
import com.twitter.util.{Await, Future}
import org.jboss.netty.buffer.ChannelBuffers.copiedBuffer
import org.jboss.netty.handler.codec.http._
object BasicServer extends TwitterServer {
val service = new Service[HttpRequest, HttpResponse] {
def apply(request: HttpRequest) = {
val response =
new DefaultHttpResponse(request.getProtocolVersion, HttpResponseStatus.OK)
response.setContent(copiedBuffer("hello", Charsets.Utf8))
Future.value(response)
}
}
def main() {
val server = Http.serve(":8888", service)
onExit {
server.close()
}
Await.ready(server)
}
}
| lookout/service-artifact-gradle-plugin | examples/scala-service/src/main/scala/com/github/lookout/serviceartifact/HelloWorld.scala | Scala | mit | 781 |
package cz.kamenitxan.jakon.webui.controller.pagelets
import java.sql.Connection
import java.util.Date
import cz.kamenitxan.jakon.core.database.DBHelper
import cz.kamenitxan.jakon.core.dynamic.{Get, Pagelet, Post}
import cz.kamenitxan.jakon.core.model.JakonUser
import cz.kamenitxan.jakon.core.service.UserService
import cz.kamenitxan.jakon.utils.PageContext
import cz.kamenitxan.jakon.utils.Utils.StringImprovements
import cz.kamenitxan.jakon.webui.controller.pagelets.data.{ForgetPasswordData, SetPasswordData}
import cz.kamenitxan.jakon.webui.entity.{Message, MessageSeverity, ResetPasswordEmailEntity}
import spark.{Request, Response}
import scala.collection.mutable
/**
* Created by TPa on 2018-11-27.
*/
@Pagelet(path = "/admin")
class ForgetPasswordPagelet extends AbstractAdminPagelet {
override val name: String = classOf[ForgetPasswordPagelet].getName
private val SQL_FIND_USER = "SELECT id, username, password, enabled, acl_id, email FROM JakonUser WHERE email = ?"
@Get(path = "/resetPassword", template = "pagelet/reset_password/resetPassword")
def get(req: Request, res: Response): Unit = {
// just render
}
//noinspection AccessorLikeMethodIsUnit
@Get(path = "/resetPasswordStep2", template = "pagelet/reset_password/resetPasswordStep2")
def getStep2(req: Request, res: Response): Unit = {
// just render
}
@Post(path = "/resetPassword", template = "pagelet/reset_password/resetPassword")
def post(req: Request, res: Response, conn: Connection, data: ForgetPasswordData): mutable.Map[String, Any] = {
val stmt = conn.prepareStatement(SQL_FIND_USER)
stmt.setString(1, data.email)
val result = DBHelper.selectSingle(stmt, classOf[JakonUser])(conn)
if (result.entity != null) {
val user = result.entity
UserService.sendForgetPasswordEmail(user, req)(conn)
}
PageContext.getInstance().messages += new Message(MessageSeverity.SUCCESS, "PASSWORD_RESET_OK")
redirect(req, res, "/admin")
}
@Post(path = "setPassword", template = "SetPassword")
def postStep2(req: Request, res: Response, data: SetPasswordData): mutable.Map[String, Any] = {
DBHelper.withDbConnection(implicit conn => {
// language=SQL
val sql = "SELECT * FROM ResetPasswordEmailEntity where token = ?"
val stmt = conn.prepareStatement(sql)
stmt.setString(1, data.token.urlEncode)
val rpe = DBHelper.selectSingleDeep(stmt)(implicitly, classOf[ResetPasswordEmailEntity])
if (rpe == null || rpe.expirationDate.before(new Date())) {
PageContext.getInstance().addMessage(MessageSeverity.ERROR, "PASSWORD_CHANGE_NOT_FOUND")
return redirect(req, res, "/")
}
rpe.user.password = data.password
rpe.user.update()
rpe.delete()
})
PageContext.getInstance().addMessage(MessageSeverity.SUCCESS, "PASSWORD_CHANGED")
redirect(req, res, "/")
}
}
| kamenitxan/Jakon | modules/backend/src/main/scala/cz/kamenitxan/jakon/webui/controller/pagelets/ForgetPasswordPagelet.scala | Scala | bsd-3-clause | 2,804 |
/*
* Copyright 2016 Lightcopy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.lightcopy.testutil
import java.io.InputStream
import java.util.UUID
import org.apache.hadoop.conf.{Configuration => HadoopConf}
import org.apache.hadoop.fs.{Path => HadoopPath}
import org.apache.spark.sql.{DataFrame, Row}
import com.github.lightcopy.testutil.implicits._
trait TestBase {
val RESOLVER = "path-resolver"
var path: String = ""
/** returns raw path of the folder where it finds resolver */
private def getRawPath(): String = {
if (path.isEmpty) {
path = getClass().getResource("/" + RESOLVER).getPath()
}
path
}
/** base directory of the project */
final protected def baseDirectory(): String = {
val original = getRawPath().split("/")
require(original.length > 4, s"Path length is too short (<= 4): ${original.length}")
val base = original.dropRight(4)
var dir = ""
for (suffix <- base) {
if (suffix.nonEmpty) {
dir = dir / suffix
}
}
dir
}
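// Hedged worked example: if the resolver resource resolves to
// /repo/target/scala-2.12/test-classes/path-resolver (build layout assumed),
// dropRight(4) removes "target", "scala-2.12", "test-classes" and
// "path-resolver", so baseDirectory() yields /repo, the project root.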
/** main directory of the project (./src/main) */
final protected def mainDirectory(): String = {
baseDirectory() / "src" / "main"
}
/** test directory of the project (./src/test) */
final protected def testDirectory(): String = {
baseDirectory() / "src" / "test"
}
/** target directory of the project (./target) */
final protected def targetDirectory(): String = {
baseDirectory() / "target"
}
final protected def mkdirs(path: String): Boolean = {
val p = new HadoopPath(path)
val fs = p.getFileSystem(new HadoopConf(false))
fs.mkdirs(p)
}
/** delete directory or file at path; recursive must be true for a directory */
final protected def rm(path: String, recursive: Boolean): Boolean = {
val p = new HadoopPath(path)
val fs = p.getFileSystem(new HadoopConf(false))
fs.delete(p, recursive)
}
/** open file at the given path */
final protected def open(path: String): InputStream = {
val p = new HadoopPath(path)
val fs = p.getFileSystem(new HadoopConf(false))
fs.open(p)
}
/** compare two DataFrame objects */
final protected def checkAnswer(df: DataFrame, expected: DataFrame): Unit = {
val got = df.collect().map(_.toString()).sortWith(_ < _)
val exp = expected.collect().map(_.toString()).sortWith(_ < _)
assert(got.sameElements(exp), s"Failed to compare DataFrame ${got.mkString("[", ", ", "]")} " +
s"with expected input ${exp.mkString("[", ", ", "]")}")
}
final protected def checkAnswer(df: DataFrame, expected: Seq[Row]): Unit = {
val sc = df.sqlContext.sparkContext
checkAnswer(df, df.sqlContext.createDataFrame(sc.parallelize(expected), df.schema))
}
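// Hedged usage sketch: rows are compared as sorted strings, so row order in
// the two DataFrames is irrelevant, e.g.
// checkAnswer(df.select("id"), Seq(Row(1), Row(2)))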
/** Create temporary directory on local file system */
def createTempDir(
root: String = System.getProperty("java.io.tmpdir"),
namePrefix: String = "lightcopy"): HadoopPath = {
val dir = new HadoopPath(root / namePrefix / UUID.randomUUID().toString)
val fs = dir.getFileSystem(new HadoopConf(false))
fs.mkdirs(dir)
dir
}
/** Execute block of code with temporary hadoop path */
private def withTempHadoopPath(path: HadoopPath)(func: HadoopPath => Unit): Unit = {
try {
func(path)
} finally {
val fs = path.getFileSystem(new HadoopConf(false))
fs.delete(path, true)
}
}
/** Execute code block with created temporary directory */
def withTempDir(func: HadoopPath => Unit): Unit = {
withTempHadoopPath(createTempDir())(func)
}
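// Hedged usage sketch; the directory is deleted recursively even if the
// body throws:
// withTempDir { dir =>
// mkdirs(dir.toString / "data") // `/` comes from testutil implicits
// }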
}
| lightcopy/spark-github-pr | src/test/scala/com/github/lightcopy/testutil/TestBase.scala | Scala | apache-2.0 | 4,047 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.utils
import org.apache.flink.api.common.typeinfo.{AtomicType, TypeInformation}
import org.apache.flink.api.java.typeutils.{PojoTypeInfo, RowTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{LocalStreamEnvironment, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.graph.GlobalDataExchangeMode
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecEnv}
import org.apache.flink.streaming.api.{TimeCharacteristic, environment}
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal, TableImpl}
import org.apache.flink.table.api.bridge.java.internal.{StreamTableEnvironmentImpl => JavaStreamTableEnvImpl}
import org.apache.flink.table.api.bridge.java.{StreamTableEnvironment => JavaStreamTableEnv}
import org.apache.flink.table.api.bridge.scala.internal.{StreamTableEnvironmentImpl => ScalaStreamTableEnvImpl}
import org.apache.flink.table.api.bridge.scala.{StreamTableEnvironment => ScalaStreamTableEnv}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog, GenericInMemoryCatalog, ObjectIdentifier}
import org.apache.flink.table.data.RowData
import org.apache.flink.table.delegation.{Executor, ExecutorFactory, PlannerFactory}
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE
import org.apache.flink.table.descriptors.Schema.SCHEMA
import org.apache.flink.table.descriptors.{CustomConnectorDescriptor, DescriptorProperties, Schema}
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.factories.{ComponentFactoryService, StreamTableSourceFactory}
import org.apache.flink.table.functions._
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{CatalogSinkModifyOperation, ModifyOperation, Operation, QueryOperation}
import org.apache.flink.table.planner.calcite.CalciteConfig
import org.apache.flink.table.planner.delegation.PlannerBase
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.operations.{DataStreamQueryOperation, PlannerQueryOperation, RichTableSourceQueryOperation}
import org.apache.flink.table.planner.plan.nodes.calcite.LogicalWatermarkAssigner
import org.apache.flink.table.planner.plan.nodes.exec.ExecNode
import org.apache.flink.table.planner.plan.optimize.program._
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.planner.plan.utils.{ExecNodePlanDumper, FlinkRelOptUtil}
import org.apache.flink.table.planner.runtime.utils.{TestingAppendTableSink, TestingRetractTableSink, TestingUpsertTableSink}
import org.apache.flink.table.planner.sinks.CollectRowTableSink
import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.fromLogicalTypeToTypeInfo
import org.apache.flink.table.sinks._
import org.apache.flink.table.sources.{StreamTableSource, TableSource}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.table.types.utils.TypeConversions
import org.apache.flink.table.typeutils.FieldInfoUtils
import org.apache.flink.types.Row
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.rel.RelNode
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.sql.{SqlExplainLevel, SqlIntervalQualifier}
import org.apache.commons.lang3.SystemUtils
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.Rule
import org.junit.rules.{ExpectedException, TemporaryFolder, TestName}
import _root_.java.math.{BigDecimal => JBigDecimal}
import _root_.java.util
import _root_.scala.collection.JavaConversions._
import _root_.scala.io.Source
/**
* Test base for testing Table API / SQL plans.
*/
abstract class TableTestBase {
// used for accurate exception information checking.
val expectedException: ExpectedException = ExpectedException.none()
// used to get the test case method name
val testName: TestName = new TestName
val _tempFolder = new TemporaryFolder
@Rule
def tempFolder: TemporaryFolder = _tempFolder
@Rule
def thrown: ExpectedException = expectedException
@Rule
def name: TestName = testName
def streamTestUtil(conf: TableConfig = new TableConfig): StreamTableTestUtil =
StreamTableTestUtil(this, conf = conf)
def scalaStreamTestUtil(): ScalaStreamTableTestUtil = ScalaStreamTableTestUtil(this)
def javaStreamTestUtil(): JavaStreamTableTestUtil = JavaStreamTableTestUtil(this)
def batchTestUtil(conf: TableConfig = new TableConfig): BatchTableTestUtil =
BatchTableTestUtil(this, conf = conf)
def scalaBatchTestUtil(): ScalaBatchTableTestUtil = ScalaBatchTableTestUtil(this)
def javaBatchTestUtil(): JavaBatchTableTestUtil = JavaBatchTableTestUtil(this)
def verifyTableEquals(expected: Table, actual: Table): Unit = {
val expectedString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(expected))
val actualString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(actual))
assertEquals(
"Logical plans do not match",
LogicalPlanFormatUtils.formatTempTableId(expectedString),
LogicalPlanFormatUtils.formatTempTableId(actualString))
}
}
abstract class TableTestUtilBase(test: TableTestBase, isStreamingMode: Boolean) {
protected lazy val diffRepository: DiffRepository = DiffRepository.lookup(test.getClass)
protected val setting: EnvironmentSettings = if (isStreamingMode) {
EnvironmentSettings.newInstance().inStreamingMode().build()
} else {
EnvironmentSettings.newInstance().inBatchMode().build()
}
// a counter for unique table names
private var counter = 0L
private def getNextId: Long = {
counter += 1
counter
}
protected def getTableEnv: TableEnvironment
protected def isBounded: Boolean = !isStreamingMode
def getPlanner: PlannerBase = {
getTableEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
}
/**
* Creates a table with the given DDL SQL string.
*/
def addTable(ddl: String): Unit = {
getTableEnv.executeSql(ddl)
}
/**
* Creates a [[DataStream]] with the given schema
* and registers it under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addDataStream[T: TypeInformation](name: String, fields: Expression*): Table = {
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
val dataStream = env.fromElements[T]().javaStream
val tableEnv = getTableEnv
TableTestUtil.createTemporaryView(tableEnv, name, dataStream, Some(fields.toArray))
tableEnv.from(name)
}
/**
* Creates a [[TestTableSource]] with the given schema
* and registers it under a unique name in the TableEnvironment's catalog.
*
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](fields: Expression*): Table = {
addTableSource[T](s"Table$getNextId", fields: _*)
}
/**
* Creates a [[TestTableSource]] with the given schema
* and registers it under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](name: String, fields: Expression*): Table = {
val typeInfo: TypeInformation[T] = implicitly[TypeInformation[T]]
val tableSchema = if (fields.isEmpty) {
val fieldTypes: Array[TypeInformation[_]] = typeInfo match {
case tt: TupleTypeInfo[_] => (0 until tt.getArity).map(tt.getTypeAt).toArray
case ct: CaseClassTypeInfo[_] => (0 until ct.getArity).map(ct.getTypeAt).toArray
case at: AtomicType[_] => Array[TypeInformation[_]](at)
case pojo: PojoTypeInfo[_] => (0 until pojo.getArity).map(pojo.getTypeAt).toArray
case _ => throw new TableException(s"Unsupported type info: $typeInfo")
}
val types = fieldTypes.map(TypeConversions.fromLegacyInfoToDataType)
val names = FieldInfoUtils.getFieldNames(typeInfo)
TableSchema.builder().fields(names, types).build()
} else {
FieldInfoUtils.getFieldsInfo(typeInfo, fields.toArray).toTableSchema
}
addTableSource(name, new TestTableSource(isBounded, tableSchema))
}
/**
* Creates a [[TestTableSource]] with the given schema
* and registers it under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param types field types
* @param fields field names
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
types: Array[TypeInformation[_]],
fields: Array[String]): Table = {
val schema = new TableSchema(fields, types)
val tableSource = new TestTableSource(isBounded, schema)
addTableSource(name, tableSource)
}
/**
* Registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param tableSource table source
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
tableSource: TableSource[_]): Table = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
name, tableSource)
getTableEnv.from(name)
}
/**
* Registers a [[ScalarFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction(name: String, function: ScalarFunction): Unit = {
getTableEnv.registerFunction(name, function)
}
/**
* Registers a [[UserDefinedFunction]] according to FLIP-65.
*/
def addTemporarySystemFunction(name: String, function: UserDefinedFunction): Unit = {
getTableEnv.createTemporarySystemFunction(name, function)
}
/**
* Registers a [[UserDefinedFunction]] class according to FLIP-65.
*/
def addTemporarySystemFunction(name: String, function: Class[_ <: UserDefinedFunction]): Unit = {
getTableEnv.createTemporarySystemFunction(name, function)
}
def verifyPlan(sql: String): Unit = {
doVerifyPlan(sql, Array.empty[ExplainDetail], withRowType = false, printPlanBefore = true)
}
def verifyPlan(sql: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(sql, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def verifyPlan(table: Table): Unit = {
doVerifyPlan(table, Array.empty[ExplainDetail], withRowType = false, printPlanBefore = true)
}
def verifyPlan(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(table, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def verifyPlanWithType(sql: String): Unit = {
doVerifyPlan(sql, Array.empty[ExplainDetail], withRowType = true, printPlanBefore = true)
}
def verifyPlanWithType(table: Table): Unit = {
doVerifyPlan(table, Array.empty[ExplainDetail], withRowType = true, printPlanBefore = true)
}
def verifyPlanNotExpected(sql: String, notExpected: String*): Unit = {
verifyPlanNotExpected(getTableEnv.sqlQuery(sql), notExpected: _*)
}
def verifyPlanNotExpected(table: Table, notExpected: String*): Unit = {
require(notExpected.nonEmpty)
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(Array(relNode), Array.empty, withRowType = false)
val result = notExpected.forall(!optimizedPlan.contains(_))
val message = s"\\nactual plan:\\n$optimizedPlan\\nnot expected:\\n${notExpected.mkString(", ")}"
assertTrue(message, result)
}
def verifyExplain(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(
stmtSet.explain(extraDetails: _*),
extraDetails.contains(ExplainDetail.ESTIMATED_COST))
}
def verifyExplain(sql: String): Unit = verifyExplain(getTableEnv.sqlQuery(sql))
def verifyExplain(sql: String, extraDetails: ExplainDetail*): Unit = {
val table = getTableEnv.sqlQuery(sql)
verifyExplain(table, extraDetails: _*)
}
def verifyExplain(table: Table): Unit = {
doVerifyExplain(table.explain(), needReplaceEstimatedCost = false)
}
def verifyExplain(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(
table.explain(extraDetails: _*),
extraDetails.contains(ExplainDetail.ESTIMATED_COST))
}
def verifyExplainInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyExplain(stmtSet, extraDetails: _*)
}
def doVerifyPlan(
sql: String,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
val table = getTableEnv.sqlQuery(sql)
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(Array(relNode), extraDetails, withRowType = withRowType)
assertEqualsOrExpand("sql", sql)
if (printPlanBefore) {
val planBefore = SystemUtils.LINE_SEPARATOR +
FlinkRelOptUtil.toString(
relNode,
SqlExplainLevel.EXPPLAN_ATTRIBUTES,
withRowType = withRowType)
assertEqualsOrExpand("planBefore", planBefore)
}
val actual = SystemUtils.LINE_SEPARATOR + optimizedPlan
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
def verifyResource(sql: String): Unit = {
assertEqualsOrExpand("sql", sql)
val table = getTableEnv.sqlQuery(sql)
doVerifyPlan(
table,
Array.empty,
withRowType = false,
printResource = true,
printPlanBefore = false)
}
def doVerifyPlan(
table: Table,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
doVerifyPlan(
table = table,
extraDetails,
withRowType = withRowType,
printPlanBefore = printPlanBefore,
printResource = false)
}
def doVerifyPlan(
table: Table,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean,
printResource: Boolean): Unit = {
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(
Array(relNode),
extraDetails,
withRowType = withRowType,
withResource = printResource)
if (printPlanBefore) {
val planBefore = SystemUtils.LINE_SEPARATOR +
FlinkRelOptUtil.toString(
relNode,
SqlExplainLevel.EXPPLAN_ATTRIBUTES,
withRowType = withRowType)
assertEqualsOrExpand("planBefore", planBefore)
}
val actual = SystemUtils.LINE_SEPARATOR + optimizedPlan
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
private def doVerifyExplain(explainResult: String, needReplaceEstimatedCost: Boolean): Unit = {
val actual = if (needReplaceEstimatedCost) {
replaceEstimatedCost(explainResult)
} else {
explainResult
}
assertEqualsOrExpand("explain", TableTestUtil.replaceStageId(actual), expand = false)
}
protected def getOptimizedPlan(
relNodes: Array[RelNode],
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
withResource: Boolean = false): String = {
require(relNodes.nonEmpty)
val planner = getPlanner
val optimizedRels = planner.optimize(relNodes)
val explainLevel = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
SqlExplainLevel.ALL_ATTRIBUTES
} else {
SqlExplainLevel.EXPPLAN_ATTRIBUTES
}
val withChangelogTraits = extraDetails.contains(ExplainDetail.CHANGELOG_MODE)
optimizedRels.head match {
case _: ExecNode[_, _] =>
val optimizedNodes = planner.translateToExecNodePlan(optimizedRels)
require(optimizedNodes.length == optimizedRels.length)
ExecNodePlanDumper.dagToString(
optimizedNodes,
detailLevel = explainLevel,
withChangelogTraits = withChangelogTraits,
withOutputType = withRowType,
withResource = withResource)
case _ =>
optimizedRels.map { rel =>
FlinkRelOptUtil.toString(
rel,
detailLevel = explainLevel,
withChangelogTraits = withChangelogTraits,
withRowType = withRowType)
}.mkString("\n")
}
}
/**
* Ignores the estimated cost, because it may be unstable.
*/
protected def replaceEstimatedCost(s: String): String = {
var str = s.replaceAll("\\r\\n", "\n")
val scientificFormRegExpr = "[+-]?[\\d]+([\\.][\\d]*)?([Ee][+-]?[0-9]{0,2})?"
str = str.replaceAll(s"rowcount = $scientificFormRegExpr", "rowcount = ")
str = str.replaceAll(s"$scientificFormRegExpr rows", "rows")
str = str.replaceAll(s"$scientificFormRegExpr cpu", "cpu")
str = str.replaceAll(s"$scientificFormRegExpr io", "io")
str = str.replaceAll(s"$scientificFormRegExpr network", "network")
str = str.replaceAll(s"$scientificFormRegExpr memory", "memory")
str
}
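// Hedged example of the replacement above: a cost line such as
// "rowcount = 1.0E8, cumulative cost = {2.2E8 rows, 1.0E8 cpu, 0.0 io}"
// becomes "rowcount = , cumulative cost = {rows, cpu, io}".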
protected def assertEqualsOrExpand(tag: String, actual: String, expand: Boolean = true): Unit = {
val expected = s"$${$tag}"
if (!expand) {
diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
return
}
val expanded = diffRepository.expand(test.name.getMethodName, tag, expected)
if (expanded != null && !expanded.equals(expected)) {
// expected does exist, check result
diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
} else {
// expected does not exist, update
diffRepository.expand(test.name.getMethodName, tag, actual)
}
}
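// Hedged note on assertEqualsOrExpand: on a first run the `${tag}`
// placeholder has no recorded expansion, so the actual value is recorded in
// the diff repository; on later runs the recorded value is asserted against.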
}
abstract class TableTestUtil(
test: TableTestBase,
// determines if the table environment should work in a batch or streaming mode
isStreamingMode: Boolean,
catalogManager: Option[CatalogManager] = None,
val tableConfig: TableConfig)
extends TableTestUtilBase(test, isStreamingMode) {
protected val testingTableEnv: TestingTableEnvironment =
TestingTableEnvironment.create(setting, catalogManager, tableConfig)
val tableEnv: TableEnvironment = testingTableEnv
tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_SHUFFLE_MODE,
GlobalDataExchangeMode.ALL_EDGES_PIPELINED.toString)
private val env: StreamExecutionEnvironment = getPlanner.getExecEnv
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
override def getTableEnv: TableEnvironment = tableEnv
def getStreamEnv: StreamExecutionEnvironment = env
/**
* Creates a [[TestTableSource]] with the given schema and table stats,
* and registers it under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param types field types
* @param fields field names
* @param statistic statistic of current table
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
types: Array[TypeInformation[_]],
fields: Array[String],
statistic: FlinkStatistic = FlinkStatistic.UNKNOWN): Table = {
val schema = new TableSchema(fields, types)
val tableSource = new TestTableSource(isBounded, schema)
addTableSource(name, tableSource, statistic)
}
/**
* Registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param tableSource table source
* @param statistic statistic of current table
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
tableSource: TableSource[_],
statistic: FlinkStatistic): Table = {
// TODO RichTableSourceQueryOperation should be deleted; use registerTableSourceInternal
// instead of registerTable here once unique keys in TableSchema are ready
// and setting catalog statistics on TableSourceTable in DatabaseCalciteSchema is ready
val identifier = ObjectIdentifier.of(
testingTableEnv.getCurrentCatalog,
testingTableEnv.getCurrentDatabase,
name)
val operation = new RichTableSourceQueryOperation(
identifier,
tableSource,
statistic)
val table = testingTableEnv.createTable(operation)
testingTableEnv.registerTable(name, table)
testingTableEnv.from(name)
}
/**
* Registers a [[TableFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = testingTableEnv.registerFunction(name, function)
/**
* Registers an [[AggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = testingTableEnv.registerFunction(name, function)
/**
* Registers a [[TableAggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = {
testingTableEnv.registerFunction(name, function)
}
def verifyPlanInsert(sql: String): Unit = {
doVerifyPlanInsert(sql, Array.empty, withRowType = false, printPlanBefore = true)
}
def verifyPlanInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = tableEnv.createStatementSet()
tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyPlan(stmtSet, extraDetails: _*)
}
def verifyPlan(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(stmtSet, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def doVerifyPlanInsert(
sql: String,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
assertEqualsOrExpand("sql", sql)
val stmtSet = tableEnv.createStatementSet()
stmtSet.addInsertSql(sql)
doVerifyPlan(stmtSet, extraDetails, withRowType, printPlanBefore)
}
def doVerifyPlan(
stmtSet: StatementSet,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
val testStmtSet = stmtSet.asInstanceOf[TestingStatementSet]
val relNodes = testStmtSet.getOperations.map(getPlanner.translateToRel)
if (relNodes.isEmpty) {
throw new TableException("No output table have been created yet. " +
"A program needs at least one output table that consumes data.\\n" +
"Please create output table(s) for your program")
}
val optimizedPlan = getOptimizedPlan(
relNodes.toArray,
extraDetails,
withRowType = withRowType)
if (printPlanBefore) {
val planBefore = new StringBuilder
relNodes.foreach { sink =>
planBefore.append(System.lineSeparator)
planBefore.append(FlinkRelOptUtil.toString(sink, SqlExplainLevel.EXPPLAN_ATTRIBUTES))
}
assertEqualsOrExpand("planBefore", planBefore.toString())
}
val actual = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
SystemUtils.LINE_SEPARATOR + replaceEstimatedCost(optimizedPlan)
} else {
SystemUtils.LINE_SEPARATOR + optimizedPlan
}
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
}
abstract class ScalaTableTestUtil(
test: TableTestBase,
isStreamingMode: Boolean)
extends TableTestUtilBase(test, isStreamingMode) {
// scala env
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
// scala tableEnv
val tableEnv: ScalaStreamTableEnv = ScalaStreamTableEnv.create(env, setting)
override def getTableEnv: TableEnvironment = tableEnv
/**
* Registers a [[TableFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
/**
* Registers an [[AggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
/**
* Registers a [[TableAggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
abstract class JavaTableTestUtil(
test: TableTestBase,
isStreamingMode: Boolean)
extends TableTestUtilBase(test, isStreamingMode) {
// java env
val env = new LocalStreamEnvironment()
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
// java tableEnv
// use impl class instead of interface class to avoid
// "Static methods in interface require -target:jvm-1.8"
val tableEnv: JavaStreamTableEnv = JavaStreamTableEnvImpl.create(env, setting, new TableConfig)
override def getTableEnv: TableEnvironment = tableEnv
/**
* Registers a [[TableFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
/**
* Registers an [[AggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
/**
* Registers a [[TableAggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
/**
* Utility for stream table test.
*/
case class StreamTableTestUtil(
test: TableTestBase,
catalogManager: Option[CatalogManager] = None,
conf: TableConfig = new TableConfig)
extends TableTestUtil(test, isStreamingMode = true, catalogManager, conf) {
/**
* Registers a table with a specific rowtime field and offset.
*
* @param tableName table name
* @param sourceTable table to register
* @param rowtimeField row time field
* @param offset offset subtracted from the rowtime field value
*/
def addTableWithWatermark(
tableName: String,
sourceTable: Table,
rowtimeField: String,
offset: Long): Unit = {
val sourceRel = TableTestUtil.toRelNode(sourceTable)
val rowtimeFieldIdx = sourceRel.getRowType.getFieldNames.indexOf(rowtimeField)
if (rowtimeFieldIdx < 0) {
throw new TableException(s"$rowtimeField does not exist, please check it")
}
val rexBuilder = sourceRel.getCluster.getRexBuilder
val inputRef = rexBuilder.makeInputRef(sourceRel, rowtimeFieldIdx)
val offsetLiteral = rexBuilder.makeIntervalLiteral(
JBigDecimal.valueOf(offset),
new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO))
val expr = rexBuilder.makeCall(FlinkSqlOperatorTable.MINUS, inputRef, offsetLiteral)
val watermarkAssigner = new LogicalWatermarkAssigner(
sourceRel.getCluster,
sourceRel.getTraitSet,
sourceRel,
rowtimeFieldIdx,
expr
)
val queryOperation = new PlannerQueryOperation(watermarkAssigner)
testingTableEnv.registerTable(tableName, testingTableEnv.createTable(queryOperation))
}
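// Hedged note: the watermark assigner built above is roughly the DDL
// equivalent of
// WATERMARK FOR <rowtimeField> AS <rowtimeField> - INTERVAL '<offset>' MILLISECOND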
def buildStreamProgram(firstProgramNameToRemove: String): Unit = {
val program = FlinkStreamProgram.buildProgram(tableEnv.getConfig.getConfiguration)
var startRemove = false
program.getProgramNames.foreach {
name =>
if (name.equals(firstProgramNameToRemove)) {
startRemove = true
}
if (startRemove) {
program.remove(name)
}
}
replaceStreamProgram(program)
}
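// Hedged example: with program names [a, b, c, d] and
// firstProgramNameToRemove = "c", both c and d are removed, leaving [a, b];
// everything from the first match onward is dropped.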
def replaceStreamProgram(program: FlinkChainedProgram[StreamOptimizeContext]): Unit = {
var calciteConfig = TableConfigUtils.getCalciteConfig(tableEnv.getConfig)
calciteConfig = CalciteConfig.createBuilder(calciteConfig)
.replaceStreamProgram(program).build()
tableEnv.getConfig.setPlannerConfig(calciteConfig)
}
def getStreamProgram(): FlinkChainedProgram[StreamOptimizeContext] = {
val tableConfig = tableEnv.getConfig
val calciteConfig = TableConfigUtils.getCalciteConfig(tableConfig)
calciteConfig.getStreamProgram.getOrElse(FlinkStreamProgram.buildProgram(
tableConfig.getConfiguration))
}
def enableMiniBatch(): Unit = {
tableEnv.getConfig.getConfiguration.setBoolean(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
tableEnv.getConfig.getConfiguration.setLong(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 3L)
}
def createAppendTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): AppendStreamTableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingAppendTableSink().configure(fieldNames, typeInfos)
}
def createUpsertTableSink(
keys: Array[Int],
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): UpsertStreamTableSink[RowData] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingUpsertTableSink(keys).configure(fieldNames, typeInfos)
}
def createRetractTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): RetractStreamTableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingRetractTableSink().configure(fieldNames, typeInfos)
}
}
/**
* Utility for stream scala table test.
*/
case class ScalaStreamTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, true) {
}
/**
* Utility for stream java table test.
*/
case class JavaStreamTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, true) {
}
/**
* Utility for batch table test.
*/
case class BatchTableTestUtil(
test: TableTestBase,
catalogManager: Option[CatalogManager] = None,
conf: TableConfig = new TableConfig)
extends TableTestUtil(test, isStreamingMode = false, catalogManager, conf) {
def buildBatchProgram(firstProgramNameToRemove: String): Unit = {
val program = FlinkBatchProgram.buildProgram(tableEnv.getConfig.getConfiguration)
var startRemove = false
program.getProgramNames.foreach {
name =>
if (name.equals(firstProgramNameToRemove)) {
startRemove = true
}
if (startRemove) {
program.remove(name)
}
}
replaceBatchProgram(program)
}
def replaceBatchProgram(program: FlinkChainedProgram[BatchOptimizeContext]): Unit = {
var calciteConfig = TableConfigUtils.getCalciteConfig(tableEnv.getConfig)
calciteConfig = CalciteConfig.createBuilder(calciteConfig)
.replaceBatchProgram(program).build()
tableEnv.getConfig.setPlannerConfig(calciteConfig)
}
def getBatchProgram(): FlinkChainedProgram[BatchOptimizeContext] = {
val tableConfig = tableEnv.getConfig
val calciteConfig = TableConfigUtils.getCalciteConfig(tableConfig)
calciteConfig.getBatchProgram.getOrElse(FlinkBatchProgram.buildProgram(
tableConfig.getConfiguration))
}
def createCollectTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): TableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new CollectRowTableSink().configure(fieldNames, typeInfos)
}
}
/**
* Utility for batch scala table test.
*/
case class ScalaBatchTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, false) {
}
/**
* Utility for batch java table test.
*/
case class JavaBatchTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, false) {
}
/**
* Batch/Stream [[org.apache.flink.table.sources.TableSource]] for testing.
*/
class TestTableSource(override val isBounded: Boolean, schema: TableSchema)
extends StreamTableSource[Row] {
override def getDataStream(execEnv: environment.StreamExecutionEnvironment): DataStream[Row] = {
execEnv.fromCollection(List[Row](), getReturnType)
}
override def getReturnType: TypeInformation[Row] = {
val logicalTypes = schema.getFieldTypes
new RowTypeInfo(logicalTypes, schema.getFieldNames)
}
override def getTableSchema: TableSchema = schema
}
object TestTableSource {
def createTemporaryTable(
tEnv: TableEnvironment,
isBounded: Boolean,
tableSchema: TableSchema,
tableName: String): Unit = {
tEnv.connect(
new CustomConnectorDescriptor("TestTableSource", 1, false)
.property("is-bounded", if (isBounded) "true" else "false"))
.withSchema(new Schema().schema(tableSchema))
.createTemporaryTable(tableName)
}
}
class TestTableSourceFactory extends StreamTableSourceFactory[Row] {
override def createStreamTableSource(
properties: util.Map[String, String]): StreamTableSource[Row] = {
val dp = new DescriptorProperties
dp.putProperties(properties)
val tableSchema = dp.getTableSchema(SCHEMA)
val isBounded = dp.getOptionalBoolean("is-bounded").orElse(false)
new TestTableSource(isBounded, tableSchema)
}
override def requiredContext(): util.Map[String, String] = {
val context = new util.HashMap[String, String]()
context.put(CONNECTOR_TYPE, "TestTableSource")
context
}
override def supportedProperties(): util.List[String] = {
val properties = new util.ArrayList[String]()
properties.add("*")
properties
}
}
class TestingTableEnvironment private(
catalogManager: CatalogManager,
moduleManager: ModuleManager,
tableConfig: TableConfig,
executor: Executor,
functionCatalog: FunctionCatalog,
planner: PlannerBase,
isStreamingMode: Boolean)
extends TableEnvironmentImpl(
catalogManager,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
isStreamingMode) {
// just for testing, remove this method once
// `<T> void registerFunction(String name, TableFunction<T> tableFunction);`
// is added to TableEnvironment
def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfTableFunction(tf, implicitly[TypeInformation[T]])
functionCatalog.registerTempSystemTableFunction(
name,
tf,
typeInfo
)
}
// just for testing, remove this method once
// `<T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction);`
// is added to TableEnvironment
def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: AggregateFunction[T, ACC]): Unit = {
registerUserDefinedAggregateFunction(name, f)
}
// just for testing, remove this method once
// `<T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggFunc);`
// is added to TableEnvironment
def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: TableAggregateFunction[T, ACC]): Unit = {
registerUserDefinedAggregateFunction(name, f)
}
private def registerUserDefinedAggregateFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: UserDefinedAggregateFunction[T, ACC]): Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
val accTypeInfo = UserDefinedFunctionHelper
.getAccumulatorTypeOfAggregateFunction(f, implicitly[TypeInformation[ACC]])
functionCatalog.registerTempSystemAggregateFunction(
name,
f,
typeInfo,
accTypeInfo
)
}
override def createTable(tableOperation: QueryOperation): TableImpl = {
super.createTable(tableOperation)
}
override def createStatementSet(): StatementSet = new TestingStatementSet(this)
}
class TestingStatementSet(tEnv: TestingTableEnvironment) extends StatementSet {
private val operations: util.List[ModifyOperation] = new util.ArrayList[ModifyOperation]
def getOperations: util.List[ModifyOperation] = operations
override def addInsertSql(statement: String): StatementSet = {
val operations = tEnv.getParser.parse(statement)
if (operations.size != 1) {
throw new TableException("Only single statement is supported.")
}
operations.get(0) match {
case op: ModifyOperation =>
this.operations.add(op)
case _ =>
throw new TableException("Only insert statement is supported now.")
}
this
}
override def addInsert(targetPath: String, table: Table): StatementSet = {
this.addInsert(targetPath, table, overwrite = false)
}
override def addInsert(targetPath: String, table: Table, overwrite: Boolean): StatementSet = {
val unresolvedIdentifier = tEnv.getParser.parseIdentifier(targetPath)
val objectIdentifier = tEnv.getCatalogManager.qualifyIdentifier(unresolvedIdentifier)
operations.add(new CatalogSinkModifyOperation(
objectIdentifier,
table.getQueryOperation,
util.Collections.emptyMap[String, String],
overwrite,
util.Collections.emptyMap[String, String]))
this
}
override def explain(extraDetails: ExplainDetail*): String = {
tEnv.explainInternal(operations.map(o => o.asInstanceOf[Operation]), extraDetails: _*)
}
override def execute(): TableResult = {
try {
tEnv.executeInternal(operations)
} finally {
operations.clear()
}
}
}
object TestingTableEnvironment {
def create(
settings: EnvironmentSettings,
catalogManager: Option[CatalogManager] = None,
tableConfig: TableConfig): TestingTableEnvironment = {
// temporary solution until FLINK-15635 is fixed
val classLoader = Thread.currentThread.getContextClassLoader
val moduleManager = new ModuleManager
val catalogMgr = catalogManager match {
case Some(c) => c
case _ =>
CatalogManager.newBuilder
.classLoader(classLoader)
.config(tableConfig.getConfiguration)
.defaultCatalog(
settings.getBuiltInCatalogName,
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName,
settings.getBuiltInDatabaseName))
.build
}
val functionCatalog = new FunctionCatalog(tableConfig, catalogMgr, moduleManager)
val executorProperties = settings.toExecutorProperties
val executor = ComponentFactoryService.find(classOf[ExecutorFactory],
executorProperties).create(executorProperties)
val plannerProperties = settings.toPlannerProperties
val planner = ComponentFactoryService.find(classOf[PlannerFactory], plannerProperties)
.create(plannerProperties, executor, tableConfig, functionCatalog, catalogMgr)
.asInstanceOf[PlannerBase]
new TestingTableEnvironment(
catalogMgr,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
settings.isStreamingMode)
}
}
object TableTestUtil {
val STREAM_SETTING: EnvironmentSettings =
EnvironmentSettings.newInstance().inStreamingMode().build()
val BATCH_SETTING: EnvironmentSettings = EnvironmentSettings.newInstance().inBatchMode().build()
/**
* Converts operation tree in the given table to a RelNode tree.
*/
def toRelNode(table: Table): RelNode = {
table.asInstanceOf[TableImpl]
.getTableEnvironment.asInstanceOf[TableEnvironmentImpl]
.getPlanner.asInstanceOf[PlannerBase]
.getRelBuilder.queryOperation(table.getQueryOperation).build()
}
def createTemporaryView[T](
tEnv: TableEnvironment,
name: String,
dataStream: DataStream[T],
fields: Option[Array[Expression]] = None,
fieldNullables: Option[Array[Boolean]] = None,
statistic: Option[FlinkStatistic] = None): Unit = {
val planner = tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
val execEnv = planner.getExecEnv
val streamType = dataStream.getType
// get field names and types for all non-replaced fields
val typeInfoSchema = fields.map((f: Array[Expression]) => {
val fieldsInfo = FieldInfoUtils.getFieldsInfo(streamType, f)
// check if event-time is enabled
if (fieldsInfo.isRowtimeDefined &&
(execEnv.getStreamTimeCharacteristic ne TimeCharacteristic.EventTime)) {
throw new ValidationException(String.format(
"A rowtime attribute requires an EventTime time characteristic in stream " +
"environment. But is: %s",
execEnv.getStreamTimeCharacteristic))
}
fieldsInfo
}).getOrElse(FieldInfoUtils.getFieldsInfo(streamType))
val fieldCnt = typeInfoSchema.getFieldTypes.length
val dataStreamQueryOperation = new DataStreamQueryOperation(
ObjectIdentifier.of(tEnv.getCurrentCatalog, tEnv.getCurrentDatabase, name),
dataStream,
typeInfoSchema.getIndices,
typeInfoSchema.toTableSchema,
fieldNullables.getOrElse(Array.fill(fieldCnt)(true)),
statistic.getOrElse(FlinkStatistic.UNKNOWN)
)
val table = createTable(tEnv, dataStreamQueryOperation)
tEnv.registerTable(name, table)
}
def createTable(tEnv: TableEnvironment, queryOperation: QueryOperation): Table = {
val createTableMethod = tEnv match {
case _: ScalaStreamTableEnvImpl | _: JavaStreamTableEnvImpl =>
tEnv.getClass.getSuperclass.getDeclaredMethod("createTable", classOf[QueryOperation])
case t: TableEnvironmentImpl =>
t.getClass.getDeclaredMethod("createTable", classOf[QueryOperation])
case _ => throw new TableException(s"Unsupported class: ${tEnv.getClass.getCanonicalName}")
}
createTableMethod.setAccessible(true)
createTableMethod.invoke(tEnv, queryOperation).asInstanceOf[Table]
}
def readFromResource(path: String): String = {
val file = getClass.getResource(path).getFile
Source.fromFile(file).mkString
}
/**
* Stage ids are ignored, because the id keeps incrementing across tests
* in a class while the StreamExecutionEnvironment is up
*/
def replaceStageId(s: String): String = {
s.replaceAll("\\r\\n", "\n").replaceAll("Stage \\d+", "")
}
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala | Scala | apache-2.0 | 44,390 |
trait Foo[+A] {
type Repr[+O] <: Foo[O] {
type Repr[+OO] = Foo.this.Repr[OO]
}
def foo: Repr[A]
def bar: Repr[A] = this.foo.foo
}
| dotty-staging/dotty | tests/pending/pos/i9346.scala | Scala | apache-2.0 | 144 |
package chk.commons.config
import java.util.Properties
import java.util.concurrent.TimeUnit
import chk.commons.{SystemError, ChkException}
import chk.commons.di.Injection
import collection.JavaConverters._
import scala.language.existentials
import com.typesafe.config._
import scala.util.control.Exception._
import scaldi._
trait ConfigPrefixInjector extends ConfigUtils with Injection {
def configPrefix: String
lazy val config = inject[Config] (identified by configPrefix is by default Configuration.empty)
}
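// Hedged usage sketch (the "mail" prefix is illustrative): a component can
// extend ConfigPrefixInjector with `def configPrefix = "mail"`; its `config`
// then resolves the Config bound under identifier "mail", falling back to
// Configuration.empty when no such binding exists.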
class ConfigModule(config: Config) extends Module {
bind [Config] to config
}
trait ConfigUtils {
implicit class ConfigOps(underlying: Config) {
private def option[T](body: => T) = handling[Option[T]](classOf[ConfigException.Missing]) by (_ => None) apply Option(body)
def getStringOpt(path: String) = option(underlying.getString(path))
def getStringListOpt(path: String) = option(underlying.getStringList(path))
def getIntOpt(path: String) = option(underlying.getInt(path))
def getIntListOpt(path: String) = option(underlying.getIntList(path))
def getLongOpt(path: String) = option(underlying.getLong(path))
def getLongListOpt(path: String) = option(underlying.getLongList(path))
def getDoubleOpt(path: String) = option(underlying.getDouble(path))
def getDoubleListOpt(path: String) = option(underlying.getDoubleList(path))
def getNumberOpt(path: String) = option(underlying.getNumber(path))
def getNumberListOpt(path: String) = option(underlying.getNumberList(path))
def getDurationOpt(path: String, unit: TimeUnit) = option(underlying.getDuration(path, unit))
def getDurationListOpt(path: String, unit: TimeUnit) = option(underlying.getDurationList(path, unit))
def getBooleanOpt(path: String) = option(underlying.getBoolean(path))
def getBooleanListOpt(path: String) = option(underlying.getBooleanList(path))
def getConfigOpt(path: String) = option(underlying.getConfig(path))
def getConfigListOpt(path: String) = option(underlying.getConfigList(path))
def getValueOpt(path: String) = option(underlying.getValue(path))
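// Hedged usage sketch (keys are illustrative):
// import com.typesafe.config.ConfigFactory
// val cfg = ConfigFactory.parseString("app { name = demo }")
// cfg.getStringOpt("app.name") // Some("demo")
// cfg.getStringOpt("app.port") // None instead of ConfigException.Missing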
def keys: Set[String] = underlying.entrySet().asScala.map(_.getKey).toSet
def subKeys: Set[String] = underlying.root().keySet().asScala.toSet
def toProperties: Properties = {
val prop = new Properties()
underlying.subKeys.foreach { key =>
val value = underlying.getString(key)
prop.setProperty(key, value)
}
prop
}
private[commons] def getOriginDescription(origin: ConfigOrigin) = {
val line = origin.lineNumber()
val input = Option(origin.url).map(_.toURI.getPath).orNull
val source = Option(origin.filename).orNull
s"(line = $line, input = $input, source = $source)"
}
def reportError(path: String, message: String, cause: Option[Throwable] = None): ChkException = {
val origin = underlying.getValueOpt(path).map(_.origin()).getOrElse(underlying.root().origin())
ChkException(SystemError.CONFIG_ERROR, cause).set("origin", origin)
}
def globalError(message: String, cause: Option[Throwable] = None): ChkException = {
ChkException(SystemError.CONFIG_ERROR, cause).set("origin", underlying.root().origin())
}
}
}
| chainkite/chk-commons | core/src/main/scala/chk/commons/config/ConfigUtils.scala | Scala | mit | 3,273 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
// SKIP-SCALATESTJS,NATIVE-START
import org.scalatestplus.junit.JUnit3Suite
import org.scalatestplus.junit.JUnitSuite
import org.junit.Test
import org.testng.annotations.{Test => TestNG }
import org.scalatestplus.testng.TestNGSuite
import org.scalatest.refspec.RefSpec
// SKIP-SCALATESTJS,NATIVE-END
import org.scalatest.{ featurespec, flatspec, freespec, funspec, funsuite, propspec, wordspec }
import org.scalatest.featurespec.AnyFeatureSpec
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.propspec.AnyPropSpec
import org.scalatest.wordspec.AnyWordSpec
class TestNameProp extends AllSuiteProp {
type FixtureServices = TestNameFixtureServices
// SKIP-SCALATESTJS,NATIVE-START
def spec = new ExampleTestNameSpec
def junit3Suite = new ExampleTestNameJUnit3Suite
def junitSuite = new ExampleTestNameJUnitSuite
def testngSuite = new ExampleTestNameTestNGSuite
// SKIP-SCALATESTJS,NATIVE-END
def funSuite = new ExampleTestNameFunSuite
def fixtureFunSuite = new ExampleTestNameFixtureFunSuite
def funSpec = new ExampleTestNameFunSpec
def fixtureFunSpec = new ExampleTestNameFixtureFunSpec
def featureSpec = new ExampleTestNameFeatureSpec
def fixtureFeatureSpec = new ExampleTestNameFixtureFeatureSpec
def flatSpec = new ExampleTestNameFlatSpec
def fixtureFlatSpec = new ExampleTestNameFixtureFlatSpec
def freeSpec = new ExampleTestNameFreeSpec
def fixtureFreeSpec = new ExampleTestNameFixtureFreeSpec
def propSpec = new ExampleTestNamePropSpec
def fixturePropSpec = new ExampleTestNameFixturePropSpec
def wordSpec = new ExampleTestNameWordSpec
def fixtureWordSpec = new ExampleTestNameFixtureWordSpec
def pathFreeSpec = new ExampleTestNamePathFreeSpec
def pathFunSpec = new ExampleTestNamePathFunSpec
test("test name will be constructed by concatennating scopes, outer to inner, followed by the test text, separated by a space after each component is trimmed.") {
forAll(examples) { s => s.assertTestNames() }
}
}
trait TestNameFixtureServices { suite: Suite =>
val expectedTestNames: Set[String]
def assertTestNames(): Unit = {
val expectedSet = expectedTestNames
val testNameSet = testNames
assert(expectedSet.size === testNameSet.size)
expectedSet.foreach { tn =>
assert(testNameSet contains tn, "Unable to find test name: '" + tn + "', testNames is: \n" + testNameSet.map("'" + _ + "'").mkString("\n"))
}
}
}
// SKIP-SCALATESTJS,NATIVE-START
@DoNotDiscover
class ExampleTestNameSpec extends RefSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
object `Testing 1` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 2 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 3` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 4` {
object `Scala code ` {
def `should be fun`: Unit = {}
}
}
object `Testing 5` {
object `Scala code` {
def ` should be fun`: Unit = {}
}
}
object ` Testing 6` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 7` {
object `Scala code` {
def `should be fun `: Unit = {}
}
}
object `Testing 8 ` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 9 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
}
@DoNotDiscover
class ExampleTestNameJUnit3Suite extends JUnit3Suite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
def testingShouldBeFun(): Unit = { }
}
@DoNotDiscover
class ExampleTestNameJUnitSuite extends JUnitSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@Test
def testingShouldBeFun(): Unit = {}
}
@DoNotDiscover
class ExampleTestNameTestNGSuite extends TestNGSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@TestNG
def testingShouldBeFun(): Unit = {}
}
// SKIP-SCALATESTJS,NATIVE-END
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSuite extends AnyFunSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") {}
test(" Testing 2 should be fun") {}
test("Testing 3 should be fun ") {}
test(" Testing 4 should be fun") {}
test("Testing 5 should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSuite extends funsuite.FixtureAnyFunSuite with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") { s => }
test(" Testing 2 should be fun") { s => }
test("Testing 3 should be fun ") { s => }
test(" Testing 4 should be fun") { s => }
test("Testing 5 should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSpec extends AnyFunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSpec extends funspec.FixtureAnyFunSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") { s => }
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") { s => }
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") { s => }
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFeatureSpec extends AnyFeatureSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
Feature("Testing 1") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 2 ") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 3") {
Scenario(" Scala code should be fun") {}
}
Feature("Testing 4") {
Scenario("Scala code should be fun ") {}
}
Feature(" Testing 5") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 6 ") {
Scenario(" Scala code should be fun") {}
}
Feature("Testing 7 ") {
Scenario("Scala code should be fun") {}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFeatureSpec extends featurespec.FixtureAnyFeatureSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
Feature("Testing 1") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 2 ") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 3") {
Scenario(" Scala code should be fun") { s => }
}
Feature("Testing 4") {
Scenario("Scala code should be fun ") { s => }
}
Feature(" Testing 5") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 6 ") {
Scenario(" Scala code should be fun") { s => }
}
Feature("Testing 7 ") {
Scenario("Scala code should be fun") { s => }
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFlatSpec extends AnyFlatSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in {
}
"Testing 2 " should "be fun to code in Scala" in {
}
"Testing 3" should " be fun to code in Scala" in {
}
"Testing 4" should "be fun to code in Scala " in {
}
" Testing 5" should "be fun to code in Scala" in {
}
"Testing 6 " should " be fun to code in Scala" in {
}
"Testing 7 " should "be fun to code in Scala" in {
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFlatSpec extends flatspec.FixtureAnyFlatSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in { s =>
}
"Testing 2 " should "be fun to code in Scala" in { s =>
}
"Testing 3" should " be fun to code in Scala" in { s =>
}
"Testing 4" should "be fun to code in Scala " in { s =>
}
" Testing 5" should "be fun to code in Scala" in { s =>
}
"Testing 6 " should " be fun to code in Scala" in { s =>
}
"Testing 7 " should "be fun to code in Scala" in { s =>
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFreeSpec extends AnyFreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFreeSpec extends freespec.FixtureAnyFreeSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in { s => }
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in { s => }
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in { s => }
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePropSpec extends AnyPropSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") {}
property(" Testing 2 Scala code should be fun") {}
property("Testing 3 Scala code should be fun ") {}
property(" Testing 4 Scala code should be fun") {}
property("Testing 5 Scala code should be fun ") {}
property(" Testing 6 Scala code should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixturePropSpec extends propspec.FixtureAnyPropSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") { s => }
property(" Testing 2 Scala code should be fun") { s => }
property("Testing 3 Scala code should be fun ") { s => }
property(" Testing 4 Scala code should be fun") { s => }
property("Testing 5 Scala code should be fun ") { s => }
property(" Testing 6 Scala code should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameWordSpec extends AnyWordSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in {}
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in {}
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in {}
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureWordSpec extends wordspec.FixtureAnyWordSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in { s => }
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in { s => }
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in { s => }
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFreeSpec extends freespec.PathAnyFreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
override def newInstance: freespec.PathAnyFreeSpecLike = new ExampleTestNamePathFreeSpec
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFunSpec extends funspec.PathAnyFunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
override def newInstance: funspec.PathAnyFunSpecLike = new ExampleTestNamePathFunSpec
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/TestNameProp.scala | Scala | apache-2.0 | 23,877 |
/*
* The MIT License
*
* Copyright (c) 2017 Fulcrum Genomics
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.fulcrumgenomics.rnaseq
import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.SamRecord
import com.fulcrumgenomics.commons.io.PathUtil
import com.fulcrumgenomics.rnaseq.EstimateRnaSeqInsertSize._
import com.fulcrumgenomics.testing.SamBuilder._
import com.fulcrumgenomics.testing.{SamBuilder, UnitSpec}
import com.fulcrumgenomics.util.GeneAnnotations.{Exon, Gene, GeneLocus, Transcript}
import com.fulcrumgenomics.util.{Io, Metric}
import htsjdk.samtools.SamPairUtil.PairOrientation
import org.scalatest.OptionValues
class EstimateRnaSeqInsertSizeTest extends UnitSpec with OptionValues {
/** Calculates the insert size from a gene. Returns None if the record's span is not enclosed in the gene or if
    * the insert sizes disagree across transcripts. */
def testInsertSizeFromGene(rec: SamRecord,
gene: Gene,
minimumOverlap: Double): Option[Int] = {
val mateCigar = EstimateRnaSeqInsertSize.getAndRequireMateCigar(rec)
val mateAlignmentEnd = EstimateRnaSeqInsertSize.mateAlignmentEndFrom(mateCigar, rec.mateStart)
EstimateRnaSeqInsertSize.insertSizeFromGene(
rec = rec,
gene = gene.loci.head,
minimumOverlap = minimumOverlap,
recInterval = EstimateRnaSeqInsertSize.intervalFrom(rec=rec, mateAlignmentEnd=mateAlignmentEnd),
recBlocks = rec.asSam.getAlignmentBlocks.toList,
mateBlocks = EstimateRnaSeqInsertSize.mateAlignmentBlocksFrom(mateCigar, rec.mateStart),
mateAlignmentEnd = mateAlignmentEnd
)
}
def mateAlignmentEnd(rec: SamRecord): Int = {
val mateCigar = EstimateRnaSeqInsertSize.getAndRequireMateCigar(rec)
EstimateRnaSeqInsertSize.mateAlignmentEndFrom(mateCigar, rec.mateStart)
}
"EstimateRnaSeqInsertSize.numReadBasesOverlappingTranscript" should "return the number of read bases overlapping a transcript" in {
def estimate(rec: SamRecord, transcript: Transcript) = EstimateRnaSeqInsertSize.numReadBasesOverlappingTranscript(rec.asSam.getAlignmentBlocks.toList, transcript)
// many .value calls here, I know
val transcript = Transcript("tx", "chr1", 2, 10, Some(2), Some(10), negativeStrand=false, exons=Seq(Exon(2,2), Exon(4,4), Exon(11, 11)))
val builder = new SamBuilder(readLength=10)
// simple matches
builder.addFrag(start=1, strand=Plus) foreach { rec => estimate(rec, transcript) shouldBe 2 }
builder.addFrag(start=1, strand=Minus) foreach { rec => estimate(rec, transcript) shouldBe 2 }
builder.addFrag(start=2, strand=Plus) foreach { rec => estimate(rec, transcript) shouldBe 3 }
builder.addFrag(start=2, strand=Minus) foreach { rec => estimate(rec, transcript) shouldBe 3 }
builder.addFrag(start=5, strand=Plus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addFrag(start=5, strand=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addFrag(start=11, strand=Plus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addFrag(start=11, strand=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addFrag(start=12, strand=Plus) foreach { rec => estimate(rec, transcript) shouldBe 0 }
builder.addFrag(start=12, strand=Minus) foreach { rec => estimate(rec, transcript) shouldBe 0 }
// some indels
builder.addFrag(start=2, cigar="1M1D1M6D8M") foreach { rec => estimate(rec, transcript) shouldBe 3 } // deletions between exons
builder.addFrag(start=2, cigar="1M8I1M") foreach { rec => estimate(rec, transcript) shouldBe 1 } // insertions
builder.addFrag(start=1, cigar="1M20D9M") foreach { rec => estimate(rec, transcript) shouldBe 0 } // deletion skips gene
// skips
builder.addFrag(start=2, cigar="1M1N1M6N8M") foreach { rec => estimate(rec, transcript) shouldBe 3 } // skips between exons
}
"EstimateRnaSeqInsertSize.insertSizeFrom" should "return the number of read bases overlapping a transcript" in {
def estimate(rec: SamRecord, transcript: Transcript) = insertSizeFromTranscript(rec, transcript, mateAlignmentEnd(rec))
// many .value calls here, I know
val transcript = Transcript("tx", "chr1", 2, 10, Some(2), Some(10), negativeStrand=false, exons=Seq(Exon(2,2), Exon(4,4), Exon(11, 11)))
val builder = new SamBuilder(readLength=5)
// Overlaps all three exons
builder.addPair(start1=1, start2=7, strand1=Plus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 3 }
builder.addPair(start1=7, start2=1, strand1=Minus, strand2=Plus) foreach { rec => estimate(rec, transcript) shouldBe 3 }
// Overlaps the first two exons
builder.addPair(start1=1, start2=7, strand1=Plus, strand2=Plus) foreach { rec => estimate(rec, transcript) shouldBe 2 }
// Overlaps the last exon
builder.addPair(start1=7, start2=1, strand1=Minus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
    // Overlaps only the last exon
builder.addPair(start1=7, start2=12, strand1=Plus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addPair(start1=12, start2=7, strand1=Minus, strand2=Plus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addPair(start1=7, start2=12, strand1=Plus, strand2=Plus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
builder.addPair(start1=12, start2=7, strand1=Minus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
// No overlap (5' positions are 26 and 12)
builder.addPair(start1=22, start2=8, strand1=Minus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 0 }
// One base overlap (5' positions are 26 and 11)
builder.addPair(start1=22, start2=7, strand1=Minus, strand2=Minus) foreach { rec => estimate(rec, transcript) shouldBe 1 }
}
it should "return a value if the record overlaps a gene" in {
val transcript = Transcript("example_transcript", "chr1", 10, 20, Some(10), Some(20), negativeStrand=false, exons=Seq(Exon(10,10), Exon(14,14), Exon(20,20)))
val gene = Gene(name="", loci=Seq(GeneLocus(Seq(transcript))))
val builder = new SamBuilder(readLength=5)
///////////////////////////////////////////////////////
// not enclosed
///////////////////////////////////////////////////////
// too far left
builder.addPair(start1=9, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=16, start2=9, strand1=Minus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=9, start2=16, strand1=Plus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=16, start2=9, strand1=Minus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
// too far right
builder.addPair(start1=10, start2=17, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=17, start2=10, strand1=Minus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=10, start2=17, strand1=Plus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
builder.addPair(start1=17, start2=10, strand1=Minus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
///////////////////////////////////////////////////////
// enclosed
///////////////////////////////////////////////////////
// enclosed (just barely)
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).value shouldBe 3 }
builder.addPair(start1=16, start2=10, strand1=Minus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).value shouldBe 3 }
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Plus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).value shouldBe 2 }
builder.addPair(start1=16, start2=10, strand1=Minus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).value shouldBe 2 }
}
it should "not return a value if there is too little overlap" in {
val transcript = Transcript("example_transcript", "chr1", 10, 20, Some(10), Some(20), negativeStrand=false, exons=Seq(Exon(10,10), Exon(20, 20)))
val gene = Gene(name="", loci=Seq(GeneLocus(Seq(transcript))))
val builder = new SamBuilder(readLength=5)
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).value shouldBe 2 }
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.2).value shouldBe 2 }
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.2001).isEmpty shouldBe true }
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.3).isEmpty shouldBe true }
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 1.0).isEmpty shouldBe true }
}
it should "not return a value if the insert size disagrees across two transcripts" in {
val transcriptA = Transcript("example_transcript_A", "chr1", 10, 20, Some(10), Some(20), negativeStrand=false, exons=Seq(Exon(10,10), Exon(20, 20)))
val transcriptB = Transcript("example_transcript_B", "chr1", 10, 20, Some(10), Some(20), negativeStrand=false, exons=Seq(Exon(10,10), Exon(19, 20))) // one longer than A
val gene = Gene(name="", loci=Seq(GeneLocus(Seq(transcriptA, transcriptB))))
val builder = new SamBuilder(readLength=5)
builder.addPair(start1=10, start2=16, strand1=Plus, strand2=Minus) foreach { rec => testInsertSizeFromGene(rec, gene, 0.0).isEmpty shouldBe true }
}
private val RefFlatFile = {
val lines = Seq(
// a run-of-the-mill gene, with one transcript and one exon
Seq("ACKR4-3", "NM_178445-A", "chr3", "+", "133801670", "133804175", "133801931", "133802984", "1", "133801670", "133804175"),
// two genes that overlap
Seq("ACKR4-4-1", "NM_178445-B", "chr4", "+", "133801670", "133804175", "133801931", "133802984", "1", "133801670", "133804175"),
Seq("ACKR4-4-2", "NM_178445-C", "chr4", "+", "133801671", "133804176", "133801931", "133802984", "1", "133801671", "133804176"),
// two transcripts that overlap but have different lengths
Seq("ACKR4-5", "NM_178445-D", "chr5", "+", "133801670", "133804176", "133801931", "133802985", "1", "133801670", "133804175"),
Seq("ACKR4-5", "NM_178445-E", "chr5", "+", "133801670", "133804176", "133801931", "133802985", "1", "133801670", "133804176"),
// a transcript with two exons
Seq("ACKR4-6", "NM_178445-F", "chr6", "+", "133801670", "133804176", "133801931", "133802985", "2", "133801670,133804175", "133801671,133804176")
).map(_.mkString("\\t"))
val refFlat = makeTempFile("refFlat.", ".txt")
Io.writeLines(path=refFlat, lines=lines)
refFlat
}
private val EmptyMetrics = PairOrientation.values().map { po => InsertSizeMetric(po) }
"EstimateRnaSeqInsertSize" should "run end-to-end" in {
val builder = new SamBuilder()
// FR = (133804075 + 100) - 133801671 = 2504
builder.addPair(contig=2, start1=133801671, start2=133804075, strand1=Plus, strand2=Minus) // overlaps ACKR4 by 100%
    builder.addPair(contig=2, start1=133801672, start2=133804074, strand1=Plus, strand2=Minus) // insert size is two less
// RF = (133804075 + 1) - (133801671 + 100 - 1) = 2306
builder.addPair(contig=2, start1=133801671, start2=133804075, strand1=Minus, strand2=Plus) // overlaps ACKR4 by 100%
builder.addPair(contig=2, start1=133801672, start2=133804074, strand1=Minus, strand2=Plus) // overlaps ACKR4 by 100%
// TANDEM = (133804075 + 1) - 133801671 = 2405
builder.addPair(contig=2, start1=133801671, start2=133804075, strand1=Plus, strand2=Plus) // overlaps ACKR4 by 100%
builder.addPair(contig=2, start1=133801672, start2=133804074, strand1=Plus, strand2=Plus) // overlaps ACKR4 by 100%
val bam = builder.toTempFile()
val out = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricExtension)
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
val expectedMetrics = Seq(
InsertSizeMetric(
pair_orientation = PairOrientation.FR,
read_pairs = 2,
standard_deviation = 1.414214,
mean = 2503,
min = 1,
max = 1,
median = 2503,
median_absolute_deviation = 1
),
InsertSizeMetric(
pair_orientation = PairOrientation.RF,
read_pairs = 2,
standard_deviation = 1.414214,
mean = 2305,
min = 1,
max = 1,
median = 2305,
median_absolute_deviation = 1
),
InsertSizeMetric(
pair_orientation = PairOrientation.TANDEM,
read_pairs = 2,
standard_deviation = 1.414214,
mean = 2404,
min = 1,
max = 1,
median = 2404,
median_absolute_deviation = 1
)
)
metrics.zip(expectedMetrics).foreach { case (actual, expected) =>
actual shouldBe expected
}
val histogramPath = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricHistogramExtension)
Io.readLines(path=histogramPath).mkString("\\n") shouldBe
"""
|insert_size fr rf tandem
|2304 0 1 0
|2306 0 1 0
|2403 0 0 1
|2405 0 0 1
|2502 1 0 0
|2504 1 0 0
""".stripMargin.trim
}
/** Developer Note (Nils Homer Jan 19 2017)
*
* The tests below are kept here for now for added test coverage, but can be removed at a later date if they become
* difficult to maintain.
*/
it should "run end-to-end and ignore reads that overlap multiple genes" in {
val builder = new SamBuilder()
builder.addPair(contig=3, start1=133801671, start2=133801671) // overlaps ACKR4-4-1 and ACKR4-4-2
val bam = builder.toTempFile()
val out = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricExtension)
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile, minimumOverlap=0.0).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
metrics.zip(EmptyMetrics).foreach { case (actual, expected) =>
actual shouldBe expected
}
}
it should "run end-to-end and ignore a reads that are not fully enclosed in a gene" in {
val builder = new SamBuilder()
builder.addPair(contig=2, start1=1, start2=133801671) // before ACKR4-3
builder.addPair(contig=2, start1=133801671, start2=133814175) // after ACKR4-3
val bam = builder.toTempFile()
val out = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricExtension)
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile, minimumOverlap=0.0).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
metrics.zip(EmptyMetrics).foreach { case (actual, expected) =>
actual shouldBe expected
}
}
it should "run end-to-end and ignore reads when the insert size is different across transcripts" in {
val builder = new SamBuilder()
builder.addPair(contig=4, start1=133801671, start2=133804077) // overlaps ACKR4-5 (multiple transcripts)
val bam = builder.toTempFile()
val out = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricExtension)
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile, minimumOverlap=0.0).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
metrics.zip(EmptyMetrics).foreach { case (actual, expected) =>
actual shouldBe expected
}
}
it should "run end-to-end and ignore reads when there are too few mapped bases overlapping exonic sequence" in {
val builder = new SamBuilder()
builder.addPair(contig=5, start1=133801671, start2=133804077, strand1=Plus, strand2=Plus) // overlaps ACKR4-6 by 2 bases!
val bam = builder.toTempFile()
val out = PathUtil.pathTo(PathUtil.removeExtension(bam).toString + EstimateRnaSeqInsertSize.RnaSeqInsertSizeMetricExtension)
// OK
{
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile, minimumOverlap=2/200.0).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
metrics.zip(EmptyMetrics).foreach { case (actual, expected) =>
if (actual.pair_orientation == PairOrientation.TANDEM) {
actual shouldBe InsertSizeMetric(
pair_orientation = PairOrientation.TANDEM,
read_pairs = 1,
mean = 1,
min = 1,
max = 1,
median = 1
)
}
else {
actual shouldBe expected
}
}
}
// Too few bases overlapping exonic sequence
{
new EstimateRnaSeqInsertSize(input=bam, refFlat=RefFlatFile, minimumOverlap=3/200.0).execute()
val metrics = Metric.read[InsertSizeMetric](path=out)
metrics.length shouldBe PairOrientation.values().length
metrics.zip(EmptyMetrics).foreach { case (actual, expected) =>
actual shouldBe expected
}
}
}
}
| fulcrumgenomics/fgbio | src/test/scala/com/fulcrumgenomics/rnaseq/EstimateRnaSeqInsertSizeTest.scala | Scala | mit | 19,543 |
package uk.gov.dvla.vehicles.presentation.common.filters
import com.google.inject.Inject
import org.apache.commons.codec.binary.Base64
import play.api.Logger
import play.api.http.ContentTypes.HTML
import play.api.http.HeaderNames.REFERER
import play.api.http.HttpVerbs.{GET, POST}
import play.api.libs.Crypto
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.iteratee.{Done, Enumerator, Iteratee, Traversable}
import play.api.mvc.BodyParsers.parse.tolerantFormUrlEncoded
import play.api.mvc.{EssentialAction, EssentialFilter, Headers, RequestHeader, Result, Results}
import uk.gov.dvla.vehicles.presentation.common
import uk.gov.dvla.vehicles.presentation.common.ConfigProperties.{booleanProp, getOptionalProperty, getProperty, stringProp}
import uk.gov.dvla.vehicles.presentation.common.LogFormats.DVLALogger
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.CookieImplicits.RichCookies
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.{AesEncryption, ClientSideSessionFactory, TrackingId}
import uk.gov.dvla.vehicles.presentation.common.utils.helpers.CommonConfig
import scala.util.Try
class CsrfPreventionFilter @Inject()
(implicit clientSideSessionFactory: ClientSideSessionFactory) extends EssentialFilter {
def apply(next: EssentialAction): EssentialAction = new CsrfPreventionAction(next)
}
/**
 * This class is based upon Play v2.2's CSRF protection. It has been stripped of code not relevant to this project,
 * and project-specific exception handling and AES encryption have been added. The unmarshalling and onward streaming
 * in the checkBody method is as Play intended it, apart from the token comparison.
 *
 * https://www.playframework.com/documentation/2.2.x/ScalaCsrf
 *
 */
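/*
 * Illustrative sketch (not part of the filter): the token round trip described above,
 * shown with hypothetical values. `aes` stands for the AesEncryption instance used below.
 *
 *   val token       = trackingId + "-" + uri                // e.g. "abc123-/sell/vehicle"
 *   val sealedToken = Crypto.signToken(aes.encrypt(token))  // what gets embedded in the rendered page
 *   Crypto.extractSignedToken(sealedToken).map(aes.decrypt) // Some("abc123-/sell/vehicle")
 */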
class CsrfPreventionAction(next: EssentialAction)
(implicit clientSideSessionFactory: ClientSideSessionFactory)
extends EssentialAction with DVLALogger {
import uk.gov.dvla.vehicles.presentation.common.filters.CsrfPreventionAction.{TokenName, aesEncryption, buildTokenWithReferer, buildTokenWithUri, preventionEnabled, split}
def apply(requestHeader: RequestHeader) = {
// check if csrf prevention is switched on
if (preventionEnabled) {
if (requestHeader.method == POST) {
// TODO remove debris around reading the whitelist from config.
        if (requestHeader.contentType.exists(_ == "application/x-www-form-urlencoded")) checkBody(requestHeader, next)
else error("POST contentType was not urlencoded")(requestHeader)
} else if (requestHeader.method == GET && requestHeader.accepts(HTML)) {
next(requestWithNewToken(requestHeader))
} else next(requestHeader)
} else next(requestHeader)
}
private def requestWithNewToken(requestHeader: RequestHeader) = {
// No token in header and we have to create one if not found, so create a new token
val newToken = buildTokenWithUri(requestHeader.cookies.trackingId(), requestHeader.uri)
val newEncryptedToken = aesEncryption.encrypt(newToken)
val newSignedEncryptedToken = Crypto.signToken(newEncryptedToken)
requestHeader.copy(tags = requestHeader.tags + (TokenName -> newSignedEncryptedToken))
}
private def checkBody(requestHeader: RequestHeader, next: EssentialAction) = {
val firstPartOfBody: Iteratee[Array[Byte], Array[Byte]] =
      Traversable.take[Array[Byte]](102400) &>> Iteratee.consume[Array[Byte]]()
firstPartOfBody.flatMap { bytes: Array[Byte] =>
val parsedBody = Enumerator(bytes) |>>> tolerantFormUrlEncoded(requestHeader)
Iteratee.flatten(parsedBody.map { parseResult =>
if (isValidTokenInPostBody(parseResult, requestHeader) || isValidTokenInPostUrl(requestHeader))
Iteratee.flatten(Enumerator(bytes) |>> next(requestHeader))
else
error("No valid token found in form body or cookies")(requestHeader)
})
}
}
private def isValidTokenInPostBody(parseResult: Either[Result, Map[String, Seq[String]]],
requestHeader: RequestHeader) =
    parseResult.fold(
      simpleResult => false, // the body could not be parsed, so there is no token to validate
      body => (for { // a candidate token was found; verify it against the header token
        values <- body.get(TokenName)
tokenOpt <- values.headOption
token <- Crypto.extractSignedToken(tokenOpt)
} yield {
val decryptedExtractedSignedToken = aesEncryption.decrypt(token)
val splitDecryptedExtractedSignedToken = split(decryptedExtractedSignedToken)
val headerToken = buildTokenWithReferer(
requestHeader.cookies.trackingId(),
requestHeader.headers
)
        // TODO name the tuple parts instead of referencing them by number
val splitTokenFromHeader = split(headerToken)
(splitDecryptedExtractedSignedToken._1 == splitTokenFromHeader._1) &&
splitTokenFromHeader._2.contains(splitDecryptedExtractedSignedToken._2)
}).getOrElse(false)
)
private def isValidTokenInPostUrl(requestHeader: RequestHeader): Boolean = {
val result = {
val tokenEncryptedAndUriEncoded = requestHeader.path.split("/").last // Split the path based on "/" character, if there is a token it will be at the end
val tokenEncrypted = new String(Base64.decodeBase64(tokenEncryptedAndUriEncoded))
Crypto.extractSignedToken(tokenEncrypted)
.map( signedToken => aesEncryption.decrypt(signedToken))
.map(decryptedExtractedSignedToken => split(decryptedExtractedSignedToken))
}
val trackingIdFromCookie = requestHeader.cookies.trackingId()
val refererFromCookie = requestHeader.cookies.getString(REFERER)
result.exists{
case (trackingIdFromUrl, refererFromUrl) =>
TrackingId(trackingIdFromUrl) == trackingIdFromCookie && refererFromCookie.exists(_.contains(refererFromUrl))
}
}
private def error(message: String)(requestHeader: RequestHeader): Iteratee[Array[Byte], Result] = {
val remoteAddress = requestHeader.remoteAddress
val path = requestHeader.path
val msg = s"CsrfPreventionException remote address: $remoteAddress path: $path, message: $message"
logMessage(requestHeader.cookies.trackingId(), Error, msg)
Done(Results.Forbidden)
}
}
object CsrfPreventionAction {
final val TokenName = "csrf_prevention_token"
private final val Delimiter = "-"
lazy val preventionEnabled = {
val enabled = getOptionalProperty[Boolean]("csrf.prevention").getOrElse(CommonConfig.DEFAULT_CSRF_PREVENTION)
Logger.info(s"[CSRF] is ${if (enabled) "enabled" else "disabled"}")
enabled
}
lazy val postWhitelist = getProperty[String]("csrf.post.whitelist").split(",")
private val aesEncryption = {
Logger.info("[CSRF] is using AES encryption for the prevention token")
new AesEncryption()
}
case class CsrfPreventionToken(value: String)
// TODO : Trap the missing token exception differently?
implicit def getToken(implicit request: RequestHeader,
clientSideSessionFactory: ClientSideSessionFactory): CsrfPreventionToken =
Try {
CsrfPreventionToken(
Crypto.signToken(
aesEncryption.encrypt(
buildTokenWithUri(request.cookies.trackingId(), request.uri)
)
)
)
}.getOrElse(CsrfPreventionToken(""))
private def buildTokenWithReferer(trackingId: TrackingId, requestHeaders: Headers) = {
trackingId.value + Delimiter + requestHeaders.get(REFERER).getOrElse("INVALID")
}
private def buildTokenWithUri(trackingId: TrackingId, uri: String) = {
trackingId.value + Delimiter + uri
}
private def split(token: String): (String, String) = {
(token.split(Delimiter)(0), token.drop(token.indexOf(Delimiter) + 1))
}
}
| dvla/vehicles-presentation-common | app/uk/gov/dvla/vehicles/presentation/common/filters/CsrfPreventionFilter.scala | Scala | mit | 7,811 |
/*
* Copyright 2016 Oleg Morozenkov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.reo7sp.f3m.data
import java.io.{OutputStreamWriter, PrintWriter}
import android.content.Context
import org.json4s.DefaultFormats
import ru.reo7sp.f3m.image.understand.perspective.Scenery
import ru.reo7sp.f3m.math.geometry.Point
import scala.io.Source
import scala.util.control.NonFatal
class AuthDataStorage(val fileName: String = "authdata.json")(implicit ctx: Context) {
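  // Storage format: one point per line, coordinates separated by single spaces,
  // e.g. "1.0 2.5 -0.75". The commented-out blocks below sketch an alternative
  // JSON (json4s) representation.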
def load: Option[Scenery] = {
implicit val formats = DefaultFormats
try {
// val points = JsonMethods.parse(Source.fromInputStream(ctx.openFileInput(fileName)).mkString).children.map(_.extract[Point])
val points = Source.fromInputStream(ctx.openFileInput(fileName)).getLines().map { line =>
val parts = line.split(' ').map(_.toDouble)
Point(parts: _*)
}
Option(Scenery(points))
} catch {
case NonFatal(_) => None
}
}
def save(scenery: Scenery): Unit = {
implicit val formats = DefaultFormats
val writer = new PrintWriter(new OutputStreamWriter(ctx.openFileOutput(fileName, Context.MODE_PRIVATE)))
try {
// val json = scenery.points.map { point =>
// ("x" -> point.x) ~ ("y" -> point.y) ~ ("z" -> point.z)
// }
// writer.write(JsonMethods.compact(JsonMethods.render(json)))
scenery.points.foreach { point =>
writer.println(s"${point.x} ${point.y} ${point.z}")
}
writer.flush()
} finally {
writer.close()
}
}
}
| reo7sp/Face3dMatch | src/main/scala/ru/reo7sp/f3m/data/AuthDataStorage.scala | Scala | apache-2.0 | 2,047 |
package org.jetbrains.plugins.scala.lang.typeInference
import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.{LatestScalaVersions, ScalaVersion}
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
class ScalaTagsImplicitConversionTest extends ScalaLightCodeInsightFixtureTestAdapter {
override protected def supportedIn(version: ScalaVersion): Boolean = version == LatestScalaVersions.Scala_2_12
override def librariesLoaders: Seq[LibraryLoader] =
super.librariesLoaders :+ IvyManagedLoader("com.lihaoyi" %% "scalatags" % "0.8.6")
def testSCL17374(): Unit = checkTextHasNoErrors(
"""
|import scalatags.Text.all._
|
|val test = div(
| div(),
| Some(div())
| )
|""".stripMargin
)
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/typeInference/ScalaTagsImplicitConversionTest.scala | Scala | apache-2.0 | 903 |
package chat.tox.antox.activities
import java.util.regex.Pattern
import android.graphics.Color
import android.graphics.drawable.ColorDrawable
import android.os.{Build, Bundle}
import android.support.v7.app.AppCompatActivity
import android.text.Html
import android.text.method.LinkMovementMethod
import android.text.util.Linkify
import android.view.{MenuItem, WindowManager}
import android.widget.TextView
import chat.tox.antox.R
class ToxMeInfoActivity extends AppCompatActivity {
protected override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_toxme_info)
getSupportActionBar.setHomeButtonEnabled(true)
getSupportActionBar.setDisplayHomeAsUpEnabled(true)
getWindow.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
getWindow.setStatusBarColor(Color.parseColor("#202020"))
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB && getSupportActionBar != null) {
getSupportActionBar.setBackgroundDrawable(new ColorDrawable(Color.parseColor("#24221f")))
}
val toxMeWebsite = findViewById(R.id.toxme_info_website).asInstanceOf[TextView]
toxMeWebsite.setMovementMethod(LinkMovementMethod.getInstance)
toxMeWebsite.setText(Html.fromHtml(getResources.getString(R.string.toxme_website)))
val sourceURLTextView = findViewById(R.id.toxme_source).asInstanceOf[TextView]
val pattern = Pattern.compile("https://github.com/LittleVulpix/toxme")
Linkify.addLinks(sourceURLTextView, pattern, "")
}
override def onOptionsItemSelected(item: MenuItem): Boolean = {
item.getItemId match {
case android.R.id.home =>
finish()
true
case _ =>
super.onOptionsItemSelected(item)
}
}
}
| subliun/Antox | app/src/main/scala/chat/tox/antox/activities/ToxMeInfoActivity.scala | Scala | gpl-3.0 | 1,938 |
package org.workcraft.pluginmanager
import java.util.UUID
import java.io.PrintWriter
import java.io.BufferedWriter
import java.io.OutputStreamWriter
import java.io.FileOutputStream
import java.io.File
import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.FileInputStream
import java.nio.charset.Charset
sealed trait ManifestReadError
object ManifestReadError {
case class Empty() extends ManifestReadError
case class VersionMismatch() extends ManifestReadError
case class Exception(e: Throwable) extends ManifestReadError
}
object PluginManifest {
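  // Manifest layout (UTF-8 text, produced by `write` and consumed by `read`):
  //   line 1:  plugin manager version UUID, e.g. "2a1b7c0e-..." (hypothetical)
  //   line 2+: one plugin class name per line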
  def write(version: UUID, path: String, plugins: Traversable[String]): Option[Throwable] = {
try {
val writer = new PrintWriter(path, "UTF-8")
writer.println(version.toString())
plugins.foreach(writer.println(_))
writer.close()
None
} catch {
      case e: Throwable => Some(e)
}
}
def read(version: UUID, path: String): Either[ManifestReadError, List[String]] = {
def readWith(reader: BufferedReader): Either[ManifestReadError, List[String]] = {
      def readList: List[String] =
        Iterator.continually(reader.readLine()).takeWhile(_ != null).toList
try {
val manifestString = reader.readLine()
if (manifestString == null)
Left(ManifestReadError.Empty())
else if (!UUID.fromString(manifestString).equals(version))
Left(ManifestReadError.VersionMismatch())
else Right(readList)
} catch {
        case e: Throwable => Left(ManifestReadError.Exception(e))
}
}
try {
val reader = new BufferedReader(new InputStreamReader(new FileInputStream(new File(path)), Charset.forName("UTF-8")))
val result = readWith(reader)
reader.close()
result
} catch {
      case e: Throwable => Left(ManifestReadError.Exception(e))
}
}
}
| tuura/workcraft-2.2 | PluginManager/src/main/scala/org/workcraft/pluginmanager/PluginManifest.scala | Scala | gpl-3.0 | 2,095 |
/*
Copyright 2015 Jo Pol
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see http://www.gnu.org/licenses/gpl.html dibl
*/
package dibl.fte
import java.lang.Math.{log, max, min}
import scala.scalajs.js.{Array, Dictionary}
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
@JSExportTopLevel("SvgPricking") object SvgPricking {
// TODO make implicit like scale
private val offsetX = 300
private val offsetY = 250
@JSExport
def create(deltas: Map[TopoLink, Delta]): String = {
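    // Sketch of the steps below: resolve node coordinates from the link deltas,
    // scale so that the shortest link renders at 30 px, emit the base tile
    // (lines plus clickable dots), then repeat it along the tile vectors
    // (the green guide lines) to preview the repeating pricking.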
println(s"SvgPricking.create deltas=${deltas.mkString("; ")}")
val startId = deltas.keys.head.sourceId
val nodes = Locations.create(Map(startId -> (0, 0)), deltas)
val tileVectors = TileVector(startId, deltas).toSeq
    val minLength = deltas.values.map { case Delta(dx, dy) =>
      Math.sqrt(dx * dx + dy * dy)
    }.min
implicit val scale: Double = 30 / minLength
val tile = deltas.map { case (tl @ TopoLink(_, s, _, t, weight), Delta(dx, dy)) =>
val (x1, y1) = nodes(s)
      val w = min(5, max(2, 2 + log(3 * weight)))
val l = line(x1, y1, x1 - dx, y1 - dy, s"""id="$s-$t" style="stroke:rgb(0,0,0);stroke-width:$w" """)
.replace("/>", s"><title>$tl</title></line>")
s"""<a href="#" onclick="clickedLink(this)">$l</a>"""
}
    val dots = nodes.map { case (id, (x, y)) =>
      val c = s"""<circle id="$id" cx="${ scale * x + offsetX }" cy="${ scale * y + offsetY }" r="8" style="fill:rgb(225,0,0);opacity:0.65"><title>$id</title></circle>"""
s"""<a href="#" onclick="clickedDot(this)">$c</a>"""
}
val clones = if (tileVectors.isEmpty) Seq("")
else {
val vectorLines = tileVectors.map { case (dx, dy) =>
line(0, 0, dx, dy, """style="stroke:rgb(0,255,0);stroke-width:3" """)
}
val (dx1, dy1) = tileVectors.head
val (dx2, dy2) = tileVectors.tail.headOption.getOrElse((-dy1 * 4, dx1 * 4))
val clones = for {
i <- -3 to 6
j <- -3 to 6
} yield {
if (i == 0 && j == 0) ""
else s"""<use transform="translate(${ i * dx1 * scale + j * dx2 * scale },${ i * dy1 * scale + j * dy2 * scale })" xlink:href="#tile" style="opacity:0.65"/>"""
}
vectorLines ++ clones
}
s"""<svg
| xmlns="http://www.w3.org/2000/svg"
| xmlns:xlink="http://www.w3.org/1999/xlink"
| id="svg2" version="1.1"
| width="${ 5 * scale }" height="${ 5 * scale }"
|>
|<g id="tile">
|${ tile.mkString("\\n") }
|${ dots.mkString("\\n") }
|</g>
|${ clones.mkString("\\n") }
|</svg>
|""".stripMargin
}
private def line(x1: Double, y1: Double, x2: Double, y2: Double, attrs: String)(implicit scale: Double): String = {
s"""<line x1="${ scale * x1 + offsetX }" y1="${ scale * y1 + offsetY }" x2="${ scale * x2 + offsetX }" y2="${ scale * y2 + offsetY }" $attrs/>"""
}
}
| jo-pol/GroundForge | src/main/scala/dibl/fte/SvgPricking.scala | Scala | gpl-3.0 | 3,606 |
package org.scala_libs.scuartz
import java.util.Date
import org.quartz.{Job,JobDetail,JobExecutionContext,SimpleTrigger}
import org.quartz.impl.StdSchedulerFactory
import org.specs._
import org.specs.specification._
import Scuartz._
class ScuartzSpecs extends Specification with DetailedFailures {
class TestJob extends Job {
def execute(ctxt : JobExecutionContext) {
println("test")
}
}
"Scuartz" should {
val sched = StdSchedulerFactory.getDefaultScheduler
val testJob = new JobDetail("test", classOf[TestJob])
sched.addJob(testJob, true)
"implicitly convert to RichScheduler as needed" in {
val trigger = new SimpleTrigger("test", "test", new Date)
trigger.setJobName("test")
val ret = sched.schedule(trigger)
ret must haveClass[RichScheduler]
}
"schedule a simple timed job" in {
(sched.schedule { "test" at (new Date) }).isExpectation
}
"schedule a complex timed job" in {
val now = System.currentTimeMillis
(sched.schedule { "test" named "test2" at (now + 5000l) every 1000l until (new Date(now + 10000l)) }).isExpectation
}
"schedule a job from a function" in {
(sched.schedule {(() ⇒ { println("Tick!") }) as "ticker" every 1000l }).isExpectation
}
"schedule a closure properly" in {
sched.start()
// Let's actually do something in this spec
var counter = 0
val incrementer = () => {
counter += 1
println("Counter = " + counter)
}
sched.schedule { incrementer as "incrementer" after 1000l every 100l repeat 5 }
Thread.sleep(3000l)
sched.shutdown()
counter must_== 5
}
}
}
| dchenbecker/Scuartz | src/test/scala/org/scala_libs/scuartz/ScuartzSpecs.scala | Scala | apache-2.0 | 1,714 |
package model
/**
* User: mtrupkin
* Date: 1/3/14
*/
class Weapon(val name: String) {
var target: Entity = Entity.None
}
| mtrupkin/brace-for-impact | game-app/src/main/scala/model/Weapon.scala | Scala | mit | 127 |
trait Is[A]
case object IsInt extends Is[Int]
case object IsString extends Is[String]
case class C[A](is: Is[A], value: A)
@main
def Test = {
val c_string: C[String] = C(IsString, "name")
val c_any: C[_] = c_string
val any: Any = c_string
// Case 1: error, tested in tests/neg/i5077.scala
// c_string match {
// case C(IsInt, _) => println(s"An Int") // Can't possibly happen!
// case C(IsString, s) => println(s"A String with length ${s.length}")
// case _ => println("No match")
// }
// Case 2: Should match the second case and print the length of the string
c_any match {
case C(IsInt, i) if i < 10 => println(s"An Int less than 10")
case C(IsString, s) => println(s"A String with length ${s.length}")
case _ => println("No match")
}
// Case 3: Same as above; should match the second case and print the length of the string
any match {
case C(IsInt, i) if i < 10 => println(s"An Int less than 10")
case C(IsString, s) => println(s"A String with length ${s.length}")
case _ => println("No match")
}
} | lampepfl/dotty | tests/run/i5077.scala | Scala | apache-2.0 | 1,066 |
import java.io.{OutputStreamWriter, PrintWriter, StreamTokenizer, BufferedReader, InputStreamReader}
object Solution {
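  // Solution for acm.timus.ru problem 1982: n nodes, of which k are initially
  // connected (marked in b); a(i * n + j) holds the cost of the edge between
  // nodes i and j. The connected set is grown Prim-style: the cheapest edge
  // from a connected node to an unconnected one is added until all n nodes
  // are connected, and the chosen edge costs are summed into `result`.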
def main(args: Array[String]) {
val reader: BufferedReader = new BufferedReader(new InputStreamReader(System.in))
val tokenizer: StreamTokenizer = new StreamTokenizer(reader)
val out: PrintWriter = new PrintWriter(new OutputStreamWriter(System.out))
tokenizer.nextToken()
val n = tokenizer.nval.toInt
tokenizer.nextToken()
val k = tokenizer.nval.toInt
val a = new Array[Int](n * n)
val b = new Array[Boolean](n)
var i = 0
var j = 0
while (i < k) {
tokenizer.nextToken()
b(tokenizer.nval.toInt - 1) = true
i += 1
}
i = 0
while (i < n) {
j = 0
while (j < n) {
tokenizer.nextToken()
a(i * n + j) = tokenizer.nval.toInt
j += 1
}
i += 1
}
var result = 0
var q = 0
val size = n - k
var v = 0
while (q < size) {
var mi = -1
var mv = Int.MaxValue
i = 0
while (i < n) {
if (b(i)) {
j = 0
while (j < n) {
if (!b(j)) {
v = a(i * n + j)
if (mv > v) {
mi = j
mv = v
}
}
j += 1
}
}
i += 1
}
result += mv
b(mi) = true
q += 1
}
out.println(result)
out.flush()
}
} | FireFry/online-judge-solutions | acm.timus.ru/1982.scala | Scala | gpl-2.0 | 1,445 |
package pl.touk.nussknacker.ui.api.helpers
import io.circe.{Encoder, Json}
import pl.touk.nussknacker.engine.api.deployment.ProcessActionType.{Deploy, ProcessActionType}
import pl.touk.nussknacker.engine.api.process.{ProcessId, VersionId}
import pl.touk.nussknacker.engine.api.{FragmentSpecificData, RequestResponseMetaData, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.graph.EspProcess
import pl.touk.nussknacker.engine.graph.node.SubprocessInputDefinition.{SubprocessClazzRef, SubprocessParameter}
import pl.touk.nussknacker.engine.graph.node.{NodeData, SubprocessInputDefinition}
import pl.touk.nussknacker.engine.marshall.ProcessMarshaller
import pl.touk.nussknacker.restmodel.displayedgraph.{DisplayableProcess, ProcessProperties, ValidatedDisplayableProcess}
import pl.touk.nussknacker.restmodel.process.ProcessingType
import pl.touk.nussknacker.restmodel.processdetails._
import pl.touk.nussknacker.ui.api.helpers.TestProcessingTypes.{Fraud, RequestResponse, Streaming}
import pl.touk.nussknacker.ui.process.ProcessCategoryService.Category
import pl.touk.nussknacker.ui.process.marshall.ProcessConverter
import java.time.LocalDateTime
import scala.util.Random
object TestProcessUtil {
type ProcessWithJson = BaseProcessDetails[DisplayableProcess]
private val randomGenerator = new Random()
def toDisplayable(espProcess: EspProcess, processingType: ProcessingType = TestProcessingTypes.Streaming): DisplayableProcess =
ProcessConverter.toDisplayable(espProcess.toCanonicalProcess, processingType)
def toJson(espProcess: EspProcess, processingType: ProcessingType = TestProcessingTypes.Streaming): Json =
Encoder[DisplayableProcess].apply(toDisplayable(espProcess, processingType))
def createBasicProcess(name: String, category: Category, isArchived: Boolean = false, processingType: String = Streaming, lastAction: Option[ProcessActionType] = None, json: Option[DisplayableProcess] = None): BaseProcessDetails[DisplayableProcess] =
toDetails(name, category, isSubprocess = false, isArchived, processingType, json = json, lastAction = lastAction)
def createSubProcess(name: String, category: Category, isArchived: Boolean = false, processingType: String = Streaming, json: Option[DisplayableProcess] = None, lastAction: Option[ProcessActionType] = None): BaseProcessDetails[DisplayableProcess] =
toDetails(name, category, isSubprocess = true, isArchived, processingType, lastAction = lastAction, json = Some(json.getOrElse(createDisplayableSubprocess(name, processingType))))
def displayableToProcess(displayable: DisplayableProcess, category: Category = TestCategories.Category1, isArchived: Boolean = false) : ProcessDetails =
toDetails(displayable.id, category, isArchived = isArchived, processingType = displayable.processingType, json = Some(displayable))
def validatedToProcess(displayable: ValidatedDisplayableProcess) : ValidatedProcessDetails =
toDetails(displayable.id, processingType = displayable.processingType).copy(json = displayable)
def toDetails(name: String, category: Category = TestCategories.Category1, isSubprocess: Boolean = false, isArchived: Boolean = false,
processingType: ProcessingType = Streaming, json: Option[DisplayableProcess] = None, lastAction: Option[ProcessActionType] = None,
description: Option[String] = None, history: Option[List[ProcessVersion]] = None) : ProcessDetails = {
val jsonData = json.map(_.copy(id = name, processingType = processingType)).getOrElse(createEmptyJson(name, processingType))
BaseProcessDetails[DisplayableProcess](
id = name,
name = name,
processId = ProcessId(generateId()),
processVersionId = VersionId.initialVersionId,
isLatestVersion = true,
description = description,
isArchived = isArchived,
isSubprocess = isSubprocess,
processingType = processingType,
processCategory = category,
modificationDate = LocalDateTime.now(),
modifiedAt = LocalDateTime.now(),
modifiedBy = "user1",
createdAt = LocalDateTime.now(),
createdBy = "user1",
tags = List(),
lastAction = lastAction.map(createProcessAction),
lastDeployedAction = lastAction.collect {
case Deploy => createProcessAction(Deploy)
},
json = jsonData,
history = history.getOrElse(Nil),
modelVersion = None
)
}
private def createEmptyJson(id: String, processingType: ProcessingType = Streaming) = {
val typeSpecificProperties = processingType match {
case RequestResponse => RequestResponseMetaData(None)
case Streaming | Fraud => StreamMetaData()
case _ => throw new IllegalArgumentException(s"Unknown processing type: $processingType.")
}
DisplayableProcess(id, ProcessProperties(typeSpecificProperties), Nil, Nil, processingType)
}
def createDisplayableSubprocess(name: String, processingType: ProcessingType): DisplayableProcess =
createDisplayableSubprocess(name, List(SubprocessInputDefinition("input", List(SubprocessParameter("in", SubprocessClazzRef[String])))), processingType)
def createDisplayableSubprocess(name: String, nodes: List[NodeData], processingType: ProcessingType): DisplayableProcess =
DisplayableProcess(name, ProcessProperties(FragmentSpecificData()), nodes, Nil, processingType)
def createProcessAction(action: ProcessActionType): ProcessAction = ProcessAction(
processVersionId = VersionId(generateId()),
performedAt = LocalDateTime.now(),
user = "user",
action = action,
commentId = None,
comment = None,
buildInfo = Map.empty
)
def createEmptyStreamingGraph(id: String): CanonicalProcess = ProcessMarshaller.fromJsonUnsafe(
s"""
|{
| "metaData" : {
| "id" : "$id",
| "typeSpecificData" : {
| "type" : "StreamMetaData"
| }
| },
| "nodes" : []
|}
|""".stripMargin
)
private def generateId() = Math.abs(randomGenerator.nextLong())
}
| TouK/nussknacker | ui/server/src/test/scala/pl/touk/nussknacker/ui/api/helpers/TestProcessUtil.scala | Scala | apache-2.0 | 6,085 |
package recfun
import common._
object Main {
def main(args: Array[String]) {
println("Pascal's Triangle")
for (row <- 0 to 10) {
for (col <- 0 to row)
print(pascal(col, row) + " ")
println()
}
}
/**
* Exercise 1
*/
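  // e.g. pascal(1, 2) == 2 and pascal(2, 4) == 6 (row 4 is 1 4 6 4 1)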
def pascal(c: Int, r: Int): Int = if (c == 0 || r == 0 || c == r) 1 else pascal(c - 1, r - 1) + pascal(c, r - 1)
/**
* Exercise 2
*/
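  // e.g. balance("(if (a) b)".toList) == true; balance(")(".toList) == false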
def balance(chars: List[Char]): Boolean = {
def iter(chars: List[Char], parens: List[Char]): Boolean =
if (chars.isEmpty) parens.isEmpty
else if (chars.head != '(' && chars.head != ')') iter(chars.tail, parens)
else if (parens.isEmpty) chars.head != ')' && iter(chars.tail, chars.head :: parens)
else if (chars.head == parens.head) iter(chars.tail, chars.head :: parens)
else iter(chars.tail, parens.tail)
iter(chars, Nil)
}
/**
* Exercise 3
*/
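  // e.g. countChange(4, List(1, 2)) == 3: 1+1+1+1, 1+1+2, 2+2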
def countChange(money: Int, coins: List[Int]): Int =
if (money == 0) 1
else if (coins.isEmpty) 0
else if (money < coins.head) countChange(money, coins.tail)
else countChange(money - coins.head, coins) + countChange(money, coins.tail)
}
| hsinhuang/codebase | ProgFun-005/recfun/src/main/scala/recfun/Main.scala | Scala | gpl-2.0 | 1,155 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.io.File
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.reflect.ClassTag
import org.apache.hadoop.conf.Configuration
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.memory.StaticMemoryManager
import org.apache.spark.network.netty.NettyBlockTransferService
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.scheduler.LiveListenerBus
import org.apache.spark.security.CryptoStreamUtils
import org.apache.spark.serializer.{KryoSerializer, SerializerManager}
import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.storage._
import org.apache.spark.streaming.receiver._
import org.apache.spark.streaming.util._
import org.apache.spark.util.{ManualClock, Utils}
import org.apache.spark.util.io.ChunkedByteBuffer
abstract class BaseReceivedBlockHandlerSuite(enableEncryption: Boolean)
extends SparkFunSuite
with BeforeAndAfter
with Matchers
with Logging {
import WriteAheadLogBasedBlockHandler._
import WriteAheadLogSuite._
val conf = new SparkConf()
.set("spark.streaming.receiver.writeAheadLog.rollingIntervalSecs", "1")
.set("spark.app.id", "streaming-test")
.set(IO_ENCRYPTION_ENABLED, enableEncryption)
val encryptionKey =
if (enableEncryption) {
Some(CryptoStreamUtils.createKey(conf))
} else {
None
}
val hadoopConf = new Configuration()
val streamId = 1
val securityMgr = new SecurityManager(conf, encryptionKey)
val broadcastManager = new BroadcastManager(true, conf, securityMgr)
val mapOutputTracker = new MapOutputTrackerMaster(conf, broadcastManager, true)
val shuffleManager = new SortShuffleManager(conf)
val serializer = new KryoSerializer(conf)
var serializerManager = new SerializerManager(serializer, conf, encryptionKey)
val manualClock = new ManualClock
val blockManagerSize = 10000000
val blockManagerBuffer = new ArrayBuffer[BlockManager]()
var rpcEnv: RpcEnv = null
var blockManagerMaster: BlockManagerMaster = null
var blockManager: BlockManager = null
var storageLevel: StorageLevel = null
var tempDirectory: File = null
before {
rpcEnv = RpcEnv.create("test", "localhost", 0, conf, securityMgr)
conf.set("spark.driver.port", rpcEnv.address.port.toString)
blockManagerMaster = new BlockManagerMaster(rpcEnv.setupEndpoint("blockmanager",
new BlockManagerMasterEndpoint(rpcEnv, true, conf,
new LiveListenerBus(conf))), conf, true)
storageLevel = StorageLevel.MEMORY_ONLY_SER
blockManager = createBlockManager(blockManagerSize, conf)
tempDirectory = Utils.createTempDir()
manualClock.setTime(0)
}
after {
    for (blockManager <- blockManagerBuffer) {
if (blockManager != null) {
blockManager.stop()
}
}
blockManager = null
blockManagerBuffer.clear()
if (blockManagerMaster != null) {
blockManagerMaster.stop()
blockManagerMaster = null
}
rpcEnv.shutdown()
rpcEnv.awaitTermination()
rpcEnv = null
Utils.deleteRecursively(tempDirectory)
}
test("BlockManagerBasedBlockHandler - store blocks") {
withBlockManagerBasedBlockHandler { handler =>
testBlockStoring(handler) { case (data, blockIds, storeResults) =>
// Verify the data in block manager is correct
val storedData = blockIds.flatMap { blockId =>
blockManager
.getLocalValues(blockId)
.map(_.data.map(_.toString).toList)
.getOrElse(List.empty)
}.toList
storedData shouldEqual data
// Verify that the store results are instances of BlockManagerBasedStoreResult
assert(
storeResults.forall { _.isInstanceOf[BlockManagerBasedStoreResult] },
"Unexpected store result type"
)
}
}
}
test("BlockManagerBasedBlockHandler - handle errors in storing block") {
withBlockManagerBasedBlockHandler { handler =>
testErrorHandling(handler)
}
}
test("WriteAheadLogBasedBlockHandler - store blocks") {
withWriteAheadLogBasedBlockHandler { handler =>
testBlockStoring(handler) { case (data, blockIds, storeResults) =>
// Verify the data in block manager is correct
val storedData = blockIds.flatMap { blockId =>
blockManager
.getLocalValues(blockId)
.map(_.data.map(_.toString).toList)
.getOrElse(List.empty)
}.toList
storedData shouldEqual data
// Verify that the store results are instances of WriteAheadLogBasedStoreResult
assert(
storeResults.forall { _.isInstanceOf[WriteAheadLogBasedStoreResult] },
"Unexpected store result type"
)
// Verify the data in write ahead log files is correct
val walSegments = storeResults.map { result =>
result.asInstanceOf[WriteAheadLogBasedStoreResult].walRecordHandle
}
val loggedData = walSegments.flatMap { walSegment =>
val fileSegment = walSegment.asInstanceOf[FileBasedWriteAheadLogSegment]
val reader = new FileBasedWriteAheadLogRandomReader(fileSegment.path, hadoopConf)
val bytes = reader.read(fileSegment)
reader.close()
serializerManager.dataDeserializeStream(
generateBlockId(),
new ChunkedByteBuffer(bytes).toInputStream())(ClassTag.Any).toList
}
loggedData shouldEqual data
}
}
}
test("WriteAheadLogBasedBlockHandler - handle errors in storing block") {
withWriteAheadLogBasedBlockHandler { handler =>
testErrorHandling(handler)
}
}
test("WriteAheadLogBasedBlockHandler - clean old blocks") {
withWriteAheadLogBasedBlockHandler { handler =>
val blocks = Seq.tabulate(10) { i => IteratorBlock(Iterator(1 to i)) }
storeBlocks(handler, blocks)
val preCleanupLogFiles = getWriteAheadLogFiles()
require(preCleanupLogFiles.size > 1)
      // this depends on the number of blocks inserted using storeBlocks()
manualClock.getTimeMillis() shouldEqual 5000L
val cleanupThreshTime = 3000L
handler.cleanupOldBlocks(cleanupThreshTime)
eventually(timeout(10000 millis), interval(10 millis)) {
getWriteAheadLogFiles().size should be < preCleanupLogFiles.size
}
}
}
test("Test Block - count messages") {
// Test count with BlockManagedBasedBlockHandler
testCountWithBlockManagerBasedBlockHandler(true)
// Test count with WriteAheadLogBasedBlockHandler
testCountWithBlockManagerBasedBlockHandler(false)
}
test("Test Block - isFullyConsumed") {
val sparkConf = new SparkConf().set("spark.app.id", "streaming-test")
sparkConf.set("spark.storage.unrollMemoryThreshold", "512")
// spark.storage.unrollFraction set to 0.4 for BlockManager
sparkConf.set("spark.storage.unrollFraction", "0.4")
sparkConf.set(IO_ENCRYPTION_ENABLED, enableEncryption)
// Block Manager with 12000 * 0.4 = 4800 bytes of free space for unroll
blockManager = createBlockManager(12000, sparkConf)
    // There is not enough space to store this block in memory, but the
    // BlockManager will be able to serialize this block to the WAL,
    // and hence count returns the correct value.
testRecordcount(false, StorageLevel.MEMORY_ONLY,
IteratorBlock((List.fill(70)(new Array[Byte](100))).iterator), blockManager, Some(70))
    // There is not enough space to store this block in memory, but the
    // BlockManager will be able to serialize this block to disk,
    // and hence count returns the correct value.
testRecordcount(true, StorageLevel.MEMORY_AND_DISK,
IteratorBlock((List.fill(70)(new Array[Byte](100))).iterator), blockManager, Some(70))
    // There is not enough space to store this block with the MEMORY_ONLY
    // StorageLevel. The BlockManager will not be able to unroll this block,
    // and hence it will not tryToPut this block, resulting in a SparkException.
storageLevel = StorageLevel.MEMORY_ONLY
withBlockManagerBasedBlockHandler { handler =>
val thrown = intercept[SparkException] {
storeSingleBlock(handler, IteratorBlock((List.fill(70)(new Array[Byte](100))).iterator))
}
}
}
private def testCountWithBlockManagerBasedBlockHandler(isBlockManagerBasedBlockHandler: Boolean) {
// ByteBufferBlock-MEMORY_ONLY
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY,
ByteBufferBlock(ByteBuffer.wrap(Array.tabulate(100)(i => i.toByte))), blockManager, None)
// ByteBufferBlock-MEMORY_ONLY_SER
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY_SER,
ByteBufferBlock(ByteBuffer.wrap(Array.tabulate(100)(i => i.toByte))), blockManager, None)
// ArrayBufferBlock-MEMORY_ONLY
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY,
ArrayBufferBlock(ArrayBuffer.fill(25)(0)), blockManager, Some(25))
// ArrayBufferBlock-MEMORY_ONLY_SER
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY_SER,
ArrayBufferBlock(ArrayBuffer.fill(25)(0)), blockManager, Some(25))
// ArrayBufferBlock-DISK_ONLY
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.DISK_ONLY,
ArrayBufferBlock(ArrayBuffer.fill(50)(0)), blockManager, Some(50))
// ArrayBufferBlock-MEMORY_AND_DISK
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_AND_DISK,
ArrayBufferBlock(ArrayBuffer.fill(75)(0)), blockManager, Some(75))
// IteratorBlock-MEMORY_ONLY
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY,
IteratorBlock((ArrayBuffer.fill(100)(0)).iterator), blockManager, Some(100))
// IteratorBlock-MEMORY_ONLY_SER
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_ONLY_SER,
IteratorBlock((ArrayBuffer.fill(100)(0)).iterator), blockManager, Some(100))
// IteratorBlock-DISK_ONLY
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.DISK_ONLY,
IteratorBlock((ArrayBuffer.fill(125)(0)).iterator), blockManager, Some(125))
// IteratorBlock-MEMORY_AND_DISK
testRecordcount(isBlockManagerBasedBlockHandler, StorageLevel.MEMORY_AND_DISK,
IteratorBlock((ArrayBuffer.fill(150)(0)).iterator), blockManager, Some(150))
}
private def createBlockManager(
maxMem: Long,
conf: SparkConf,
name: String = SparkContext.DRIVER_IDENTIFIER): BlockManager = {
val memManager = new StaticMemoryManager(conf, Long.MaxValue, maxMem, numCores = 1)
val transfer = new NettyBlockTransferService(conf, securityMgr, "localhost", "localhost", 0, 1)
val blockManager = new BlockManager(name, rpcEnv, blockManagerMaster, serializerManager, conf,
memManager, mapOutputTracker, shuffleManager, transfer, securityMgr, 0)
memManager.setMemoryStore(blockManager.memoryStore)
blockManager.initialize("app-id")
blockManagerBuffer += blockManager
blockManager
}
/**
* Test storing of data using different types of Handler, StorageLevel and ReceivedBlocks
* and verify the correct record count
*/
private def testRecordcount(isBlockManagedBasedBlockHandler: Boolean,
sLevel: StorageLevel,
receivedBlock: ReceivedBlock,
bManager: BlockManager,
expectedNumRecords: Option[Long]
) {
blockManager = bManager
storageLevel = sLevel
var bId: StreamBlockId = null
try {
if (isBlockManagedBasedBlockHandler) {
// test received block with BlockManager based handler
withBlockManagerBasedBlockHandler { handler =>
val (blockId, blockStoreResult) = storeSingleBlock(handler, receivedBlock)
bId = blockId
assert(blockStoreResult.numRecords === expectedNumRecords,
"Message count not matches for a " +
receivedBlock.getClass.getName +
" being inserted using BlockManagerBasedBlockHandler with " + sLevel)
}
} else {
// test received block with WAL based handler
withWriteAheadLogBasedBlockHandler { handler =>
val (blockId, blockStoreResult) = storeSingleBlock(handler, receivedBlock)
bId = blockId
assert(blockStoreResult.numRecords === expectedNumRecords,
"Message count not matches for a " +
receivedBlock.getClass.getName +
" being inserted using WriteAheadLogBasedBlockHandler with " + sLevel)
}
}
} finally {
      // Remove the block id so the same blockManager can be reused by the next test
blockManager.removeBlock(bId, true)
}
}
/**
* Test storing of data using different forms of ReceivedBlocks and verify that they succeeded
* using the given verification function
*/
private def testBlockStoring(receivedBlockHandler: ReceivedBlockHandler)
(verifyFunc: (Seq[String], Seq[StreamBlockId], Seq[ReceivedBlockStoreResult]) => Unit) {
val data = Seq.tabulate(100) { _.toString }
def storeAndVerify(blocks: Seq[ReceivedBlock]) {
blocks should not be empty
val (blockIds, storeResults) = storeBlocks(receivedBlockHandler, blocks)
withClue(s"Testing with ${blocks.head.getClass.getSimpleName}s:") {
        // Verify the returned store results have correct block ids
(storeResults.map { _.blockId }) shouldEqual blockIds
// Call handler-specific verification function
verifyFunc(data, blockIds, storeResults)
}
}
def dataToByteBuffer(b: Seq[String]) =
serializerManager.dataSerialize(generateBlockId, b.iterator)
val blocks = data.grouped(10).toSeq
storeAndVerify(blocks.map { b => IteratorBlock(b.toIterator) })
storeAndVerify(blocks.map { b => ArrayBufferBlock(new ArrayBuffer ++= b) })
storeAndVerify(blocks.map { b => ByteBufferBlock(dataToByteBuffer(b).toByteBuffer) })
}
  /** Test error handling for blocks that cannot be stored */
private def testErrorHandling(receivedBlockHandler: ReceivedBlockHandler) {
// Handle error in iterator (e.g. divide-by-zero error)
intercept[Exception] {
val iterator = (10 to (-10, -1)).toIterator.map { _ / 0 }
receivedBlockHandler.storeBlock(StreamBlockId(1, 1), IteratorBlock(iterator))
}
    // Handle error in block manager storing (e.g. too-big block)
intercept[SparkException] {
val byteBuffer = ByteBuffer.wrap(new Array[Byte](blockManagerSize + 1))
receivedBlockHandler.storeBlock(StreamBlockId(1, 1), ByteBufferBlock(byteBuffer))
}
}
  /** Instantiate a BlockManagerBasedBlockHandler and run code with it */
private def withBlockManagerBasedBlockHandler(body: BlockManagerBasedBlockHandler => Unit) {
body(new BlockManagerBasedBlockHandler(blockManager, storageLevel))
}
  /** Instantiate a WriteAheadLogBasedBlockHandler and run code with it */
private def withWriteAheadLogBasedBlockHandler(body: WriteAheadLogBasedBlockHandler => Unit) {
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = false) === 1)
val receivedBlockHandler = new WriteAheadLogBasedBlockHandler(blockManager, serializerManager,
1, storageLevel, conf, hadoopConf, tempDirectory.toString, manualClock)
try {
body(receivedBlockHandler)
} finally {
receivedBlockHandler.stop()
}
}
/** Store blocks using a handler */
private def storeBlocks(
receivedBlockHandler: ReceivedBlockHandler,
blocks: Seq[ReceivedBlock]
): (Seq[StreamBlockId], Seq[ReceivedBlockStoreResult]) = {
val blockIds = Seq.fill(blocks.size)(generateBlockId())
val storeResults = blocks.zip(blockIds).map {
case (block, id) =>
manualClock.advance(500) // log rolling interval set to 1000 ms through SparkConf
logDebug("Inserting block " + id)
receivedBlockHandler.storeBlock(id, block)
}.toList
logDebug("Done inserting")
(blockIds, storeResults)
}
/** Store single block using a handler */
private def storeSingleBlock(
handler: ReceivedBlockHandler,
block: ReceivedBlock
): (StreamBlockId, ReceivedBlockStoreResult) = {
val blockId = generateBlockId
val blockStoreResult = handler.storeBlock(blockId, block)
logDebug("Done inserting")
(blockId, blockStoreResult)
}
private def getWriteAheadLogFiles(): Seq[String] = {
getLogFilesInDirectory(checkpointDirToLogDir(tempDirectory.toString, streamId))
}
private def generateBlockId(): StreamBlockId = StreamBlockId(streamId, scala.util.Random.nextLong)
}
class ReceivedBlockHandlerSuite extends BaseReceivedBlockHandlerSuite(false)
class ReceivedBlockHandlerWithEncryptionSuite extends BaseReceivedBlockHandlerSuite(true)
| cin/spark | streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala | Scala | apache-2.0 | 17,782 |
/*
* Util.scala
* Utility functions for atomic continuous elements.
*
* Created By: Avi Pfeffer ([email protected])
* Creation Date: Feb 25, 2011
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.library.atomic.continuous
import com.cra.figaro.util.random
import annotation.tailrec
import scala.math._
object Util {
/**
* Generate an exponentially distributed random variable.
*/
def generateExponential(lambda: Double): Double = -log(random.nextDouble()) / lambda
/**
* Generate a Gamma distributed random variable.
* Best's rejection algorithm XGB from Luc Devroye, Non-Uniform Random Variate Generation, p. 410.
*/
def generateGamma(k: Double) = {
val b = k - 1
val c = 3 * k - 0.75
@tailrec
def generateGreaterThanOne(): Double = {
val u = random.nextDouble()
val v = random.nextDouble()
val w = u * (1 - u)
val y = sqrt(c / w) * (u - 0.5)
val x = b + y
val accept =
if (x >= 0) {
val z = 64 * w * w * w * v * v
(z <= 1 - 2 * y * y / x) || (log(z) <= 2 * (b * log(x / b) - y))
} else false
      if (accept) x else generateGreaterThanOne()
}
// See Wikipedia, Gamma distribution
@tailrec
def generateLessThanOne(): Double = {
val v0 = random.nextDouble()
val v1 = random.nextDouble()
val v2 = random.nextDouble()
val (epsilon, eta) =
if (v2 <= E / (E + k)) {
val epsilon = pow(v1, 1 / k)
val eta = v0 * pow(epsilon, k - 1)
(epsilon, eta)
} else {
val epsilon = 1 - log(v1)
val eta = v0 * exp(-epsilon)
(epsilon, eta)
}
if (eta <= pow(epsilon, k - 1) * exp(-epsilon)) epsilon
else generateLessThanOne()
}
if (k > 1.0) generateGreaterThanOne()
else if (k < 1.0) generateLessThanOne()
else generateExponential(1.0)
}
/**
* Generate a Beta distributed random variable.
   * See Devroye, Non-Uniform Random Variate Generation, p. 432
*/
def generateBeta(a: Double, b: Double) = {
val ga = generateGamma(a)
val gb = generateGamma(b)
ga / (ga + gb)
}
}
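/*
 Hedged sanity-check sketch (illustrative only, not part of the original
 library): for large n, the sample mean of Gamma(k) draws should approach k
 (unit scale), and the mean of Beta(a, b) draws should approach a / (a + b).
*/
object UtilChecks {
  private def approxMean(sample: () => Double, n: Int = 100000): Double =
    Seq.fill(n)(sample()).sum / n
  def run(): Unit = {
    println(approxMean(() => Util.generateGamma(2.5)))     // expect ~ 2.5
    println(approxMean(() => Util.generateBeta(2.0, 3.0))) // expect ~ 0.4
  }
}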
| jyuhuan/figaro | Figaro/src/main/scala/com/cra/figaro/library/atomic/continuous/Util.scala | Scala | bsd-3-clause | 2,363 |
package com.imageprocessing.core
import akka.actor.{ ActorRef, Actor }
import akka.actor.SupervisorStrategy.Escalate
import com.imageprocessing._
import scala.Some
import akka.actor.OneForOneStrategy
import com.sksamuel.scrimage
import scala.concurrent.Future
import dispatch.Defaults.executor
import dispatch._
import com.github.nscala_time.time.Imports._
import java.io.InputStream
import java.io.ByteArrayInputStream
import com.sksamuel.scrimage.Image
import com.sksamuel.scrimage.filter._
import java.awt.Color
class ProcessImageActor() extends Actor {
def receive = {
/**
* Download file and extract operations to be performed
*/
case Request(id, operations) => {
downloadFileFromURL(id).map { imageBytes =>
{
val image = Image(imageBytes)
val operationList = getOperationMap(operations.split("/").toList, Map())
self ! ProcessImage(image, operationList)
}
}
}
    /**
     * Apply the remaining operations one at a time; reply with the
     * final image bytes when none are left
     */
case ProcessImage(image, operations) => {
operations.isEmpty match {
case true => {
context.parent ! image.write
}
case false => {
val (key, value) = operations.head
val processedImage = processImage(image, key, value)
self ! ProcessImage(processedImage, operations.tail)
}
}
}
}
  /**
   * Apply a single named operation (filter, crop, resize, etc.) to the image
   */
def processImage(image: Image, op: String, values: String) = Image {
val params = values match {
case _ if values.contains(",") => values.split(",")
case _ if values.contains(":") => values.split(":")
case _ => values.split(" ")
}
op.toLowerCase() match {
case "blur" => image.filter(BlurFilter)
case "border" => image.filter(BorderFilter(params(0).toInt))
case "brightness" => image.filter(BrightnessFilter(params(0).toFloat))
case "bump" => image.filter(BumpFilter)
case "chrome" => image.filter(ChromeFilter())
case "color_halftone" => image.filter(ColorHalftoneFilter())
case "contour" => image.filter(ContourFilter())
case "contrast" => image.filter(ContrastFilter(params(0).toFloat))
case "despeckle" => image.filter(DespeckleFilter)
case "diffuse" => image.filter(DiffuseFilter(params(0).toInt))
case "dither" => image.filter(DitherFilter)
case "edge" => image.filter(EdgeFilter)
case "emboss" => image.filter(EmbossFilter)
case "errordiffusion" => image.filter(ErrorDiffusionHalftoneFilter())
case "gamma" => image.filter(GammaFilter(params(0).toInt))
case "gaussian" => image.filter(GaussianBlurFilter())
case "glow" => image.filter(GlowFilter())
case "grayscale" => image.filter(GrayscaleFilter)
case "hsb" => image.filter(HSBFilter(params(0).toInt))
case "invert" => image.filter(InvertFilter)
case "lensblur" => image.filter(LensBlurFilter())
case "lensflare" => image.filter(LensFlareFilter)
case "minimum" => image.filter(MinimumFilter)
case "maximum" => image.filter(MaximumFilter)
case "motionblur" => image.filter(MotionBlurFilter(Math.PI / params(0).toInt, params(1).toInt))
case "noise" => image.filter(NoiseFilter())
case "offset" => image.filter(OffsetFilter(params(0).toInt, params(1).toInt))
case "oil" => image.filter(OilFilter())
case "pixelate" => image.filter(PixelateFilter(params(0).toInt))
case "pointillize_square" => image.filter(PointillizeFilter(PointillizeGridType.Square))
case "posterize" => image.filter(PosterizeFilter())
case "prewitt" => image.filter(PrewittFilter)
case "quantize" => image.filter(QuantizeFilter(params(0).toInt))
case "rays" => image.filter(RaysFilter(threshold = params(0).toFloat, strength = params(1).toFloat))
case "ripple" => image.filter(RippleFilter(RippleType.Sine))
case "roberts" => image.filter(RobertsFilter)
case "rylanders" => image.filter(RylandersFilter)
case "sepia" => image.filter(SepiaFilter)
case "smear_circles" => image.filter(SmearFilter(SmearType.Circles))
case "snow" => image.filter(SnowFilter)
case "sobels" => image.filter(SobelsFilter)
case "solarize" => image.filter(SolarizeFilter)
case "sparkle" => image.filter(SparkleFilter())
case "summer" => image.filter(SummerFilter())
case "swim" => image.filter(SwimFilter())
case "television" => image.filter(TelevisionFilter)
case "threshold" => image.filter(ThresholdFilter(params(0).toInt))
case "tritone" => image.filter(TritoneFilter(new Color(0xFF000044), new Color(0xFF0066FF), Color.WHITE))
case "twirl" => image.filter(TwirlFilter(params(0).toInt))
case "unsharp" => image.filter(UnsharpFilter())
case "vignette" => image.filter(VignetteFilter())
case "vintage" => image.filter(VintageFilter)
case "autocrop" => image.autocrop(java.awt.Color.getColor(params(0)))
case "trim" => image.trim(params(0).toInt, params(1).toInt, params(2).toInt, params(3).toInt)
case "fit" => image.fit(params(0).toInt, params(1).toInt)
case "bound" => image.bound(params(0).toInt, params(1).toInt)
case "cover" => image.cover(params(0).toInt, params(1).toInt)
case "pad" => params.length match {
case 1 => image.pad(params(0).toInt)
case _ => image.padTo(params(0).toInt, params(1).toInt)
}
case "rotate" => params(0).toLowerCase() match {
case "left" => image.rotateLeft
case _ => image.rotateRight
}
case "flip" => params(0).toLowerCase() match {
case "x" => image.flipX
case _ => image.flipY
}
case "crop" =>
params.length match {
case 1 => image.resize(params(0).toInt)
case _ => image.resizeTo(params(0).toInt, params(1).toInt)
}
case "resize" =>
params.length match {
case 1 => image.scale(params(0).toInt)
case _ => image.scaleTo(params(0).toInt, params(1).toInt)
}
}
}
  /**
   * Convert the REST operation path segments into a key/value Map
   */
def getOperationMap(ops: List[String], map: Map[String, String]): Map[String, String] = ops match {
case Nil => map
case _ => {
val key = ops.head
val value = ops.tail.head
val rest = ops.tail.tail
key match {
case "f" => {
val filterList = value.split(":")
filterList.length match {
case 1 => getOperationMap(rest, map + (filterList(0) -> "0"))
case _ => getOperationMap(rest, map + (filterList(0) -> filterList(1)))
}
}
case _ => getOperationMap(rest, map + (key -> value))
}
}
}
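  /*
   Illustrative example (assumption, not in the original source): the path
   segments List("resize", "200,300", "f", "blur") fold into
     Map("resize" -> "200,300", "blur" -> "0")
   i.e. plain key/value pairs, with "f" entries expanded to
   filterName -> argument (or "0" when the filter takes no argument).
  */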
  /**
   * Download file from URL (note: `id` is currently unused and the URL is
   * hard-coded)
   */
def downloadFileFromURL(id: String): Future[Array[Byte]] = {
//val folderName = s"${DateTime.now.year.get}0${DateTime.now.month.get}${DateTime.now.day.get}"
val req = url(s"http://farm8.staticflickr.com/7451/11994271374_8bd853ef41_h.jpg").GET
Http(req OK as.Bytes)
}
override val supervisorStrategy =
OneForOneStrategy() {
case _ => Escalate
}
} | karthik20522/SprayImageProcessing | src/main/scala/com/imageprocessing/core/ProcessImageActor.scala | Scala | mit | 7,157 |
package com.bio4j.release.generic
import com.bio4j.data._
import com.bio4j.model._
import com.bio4j.angulillos._
import scala.compat.java8.OptionConverters._
import bio4j.data.uniprot._
// from keywords-all.tsv
case class KeywordRow(val id: String, val description: String, val category: String)
case class ImportUniProt[V,E](val graph: UniProtGraph[V,E]) {
type G = UniProtGraph[V,E]
def g: G = graph
/* This class represents pairs of entries and the corresponding canonical protein */
case class EntryProtein(val entry: AnyEntry, val protein: G#Protein)
  /*
    This method imports the entry's canonical protein *and* all isoforms,
    adding *isoforms* edges between them. All properties of the canonical
    protein are set, while isoform *sequences* are missing: they are imported
    from a separate FASTA file.
    The return value corresponds to `(EntryProtein(e, protein), isoforms)`.
  */
def allProteins(e: AnyEntry): (EntryProtein, Seq[G#Protein]) = {
val entryProteinAccession =
e.accessionNumbers.primary
val isoformComments =
e.comments collect { case i: Isoform => i }
val entryProteinID =
isoformComments.filter(_.isEntry).headOption.fold(entryProteinAccession)(_.id)
val isoforms =
isoformComments filterNot { _.isEntry }
// either there's a recommended name or a submitted name
val entryProteinFullName =
e.description.recommendedName
.fold(e.description.submittedNames.head.full)(_.full)
val dataset =
conversions.statusToDatasets( e.identification.status )
val existence =
conversions.proteinExistenceToExistenceEvidence( e.proteinExistence )
/* All protein properties are set at this point: */
val entryProtein =
g.protein.addVertex
.set(g.protein.id, entryProteinID)
.set(g.protein.accession, entryProteinAccession)
.set(g.protein.fullName, entryProteinFullName)
.set(g.protein.dataset, dataset)
.set(g.protein.sequence, e.sequence.value)
.set(g.protein.sequenceLength, e.sequenceHeader.length: Integer )
.set(g.protein.mass, e.sequenceHeader.molecularWeight: Integer)
// only newly imported isoform vertices are here
val isoformVertices =
isoforms collect {
scala.Function.unlift { isoform =>
g.protein.id.index.find(isoform.id).asScala
.fold[Option[G#Protein]]({
// need to add the new isoform
val isoformV =
g.protein.addVertex
.set(g.protein.id, isoform.id)
.set(g.protein.fullName, s"${e.description.recommendedName.fold(e.description.submittedNames.head.full)(_.full)} ${isoform.name}")
val edge = g.isoforms.addEdge(entryProtein, isoformV)
Some(isoformV)
})(
// already there; add an edge from the current entry protein
isoformV => { g.isoforms.addEdge(entryProtein, isoformV); None }
)
}
}
(EntryProtein(e, entryProtein), isoformVertices)
}
def geneNames(entryProtein: EntryProtein): (EntryProtein, Seq[G#GeneName]) = {
val geneNames: Seq[String] =
validGeneNames(entryProtein.entry.geneNames)
val newGeneNames = geneNames collect {
scala.Function.unlift { name =>
val present =
g.geneName.name.index.find(name)
.asScala
present.fold[Option[G#GeneName]]({
val newGeneName =
g.geneName.addVertex
.set(g.geneName.name, name)
val edge = g.geneProducts.addEdge(newGeneName, entryProtein.protein)
Some(newGeneName)
}
)(
// gene name vertex present, only add edge
geneName => {
g.geneProducts.addEdge(geneName, entryProtein.protein)
None
}
)
}
}
(entryProtein, newGeneNames)
}
def keywords(entryProtein: EntryProtein): (EntryProtein, Seq[G#Keywords]) = {
val keywords =
entryProtein.entry.keywords
val keywordEdges =
keywords collect {
scala.Function.unlift { kw =>
g.keyword.id.index.find(kw.id).asScala.map { g.keywords.addEdge(entryProtein.protein, _) }
}
}
(entryProtein, keywordEdges)
}
def comments(entryProtein: EntryProtein): (EntryProtein, Seq[G#Comment]) = {
val entryComments: Seq[Comment] =
entryProtein.entry.comments filterNot { x => x.isInstanceOf[Isoform] }
val commentVertices =
entryComments map { cc =>
val comment = g.comment.addVertex
.set(g.comment.topic, conversions.commentTopic(cc))
.set(g.comment.text, cc.asInstanceOf[{ val text: String }].text) // TODO needs bio4j/data.uniprot#19 or something similar
g.comments.addEdge(entryProtein.protein, comment)
comment
}
(entryProtein, commentVertices)
}
def features(entryProtein: EntryProtein): (EntryProtein, Seq[G#Annotation]) = {
val entryFeatures =
entryProtein.entry.features
val annotationVertices =
entryFeatures map { ft =>
val annotationV =
g.annotation.addVertex
.set(g.annotation.featureType, conversions.featureKeyToFeatureType(ft.key))
.set(g.annotation.description, ft.description)
val annotationE =
g.annotations.addEdge(entryProtein.protein, annotationV)
.set(g.annotations.begin, conversions.featureFromAsInt(ft.from): Integer)
.set(g.annotations.end, conversions.featureToAsInt(ft.to): Integer)
annotationV
}
(entryProtein, annotationVertices)
}
def isoformSequencesFrom(fasta: IsoformFasta): Option[G#Protein] =
g.protein.id.index.find(fasta.proteinID).asScala.map { isoform =>
isoform
.set(g.protein.sequence, fasta.sequence)
.set(g.protein.sequenceLength, fasta.sequence.length: java.lang.Integer)
}
def keywordTypes(row: KeywordRow): G#Keyword = {
val kwType =
g.keyword.addVertex
.set(g.keyword.id, row.id)
.set(g.keyword.definition, row.description)
conversions.stringToKeywordCategory(row.category).foreach { kwType.set(g.keyword.category, _) }
kwType
}
private def validGeneNames(gns: Seq[GeneName]): Seq[String] =
gns collect {
scala.Function.unlift { gn =>
gn.name.fold(gn.ORFNames.headOption)(n => Some(n.official))
}
}
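  /*
   Illustrative note (assumption, not in the original source): a gene with an
   official name contributes that name, one with only ORF names contributes
   its first ORF name, and a gene with neither is dropped by the unlift.
  */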
}
| bio4j/bio4j-data-import | src/main/scala/uniprot/uniprot.scala | Scala | agpl-3.0 | 6,504 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.utils
import java.io.OutputStream
case class MultiOutputStream(val outputStreams: List[OutputStream])
extends OutputStream
{
require(outputStreams != null)
override def write(cbuf: Array[Byte]): Unit =
outputStreams.foreach(outputStream => outputStream.write(cbuf))
override def write(cbuf: Array[Byte], off: Int, len: Int): Unit =
outputStreams.foreach(outputStream => outputStream.write(cbuf, off, len))
override def write(b: Int): Unit =
outputStreams.foreach(outputStream => outputStream.write(b))
override def flush() =
outputStreams.foreach(outputStream => outputStream.flush())
override def close() =
outputStreams.foreach(outputStream => outputStream.close())
}
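// Hedged usage sketch (illustrative only, not part of Toree): every write is
// fanned out to all wrapped streams, e.g. to the console and to an in-memory
// buffer at the same time.
object MultiOutputStreamExample {
  def demo(): Unit = {
    val buffer = new java.io.ByteArrayOutputStream()
    val multi = MultiOutputStream(List(System.out, buffer))
    multi.write("hello".getBytes)
    multi.flush()
    assert(buffer.toString == "hello")
  }
}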
| Myllyenko/incubator-toree | kernel-api/src/main/scala/org/apache/toree/utils/MultiOutputStream.scala | Scala | apache-2.0 | 1,550 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import breeze.linalg.{DenseVector => BDV}
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.impl.Utils.EPSILON
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.stat.distribution.MultivariateGaussian
import org.apache.spark.ml.util._
import org.apache.spark.mllib.linalg.{Matrices => OldMatrices, Matrix => OldMatrix,
Vector => OldVector, Vectors => OldVectors}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types.{IntegerType, StructType}
/**
* Common params for GaussianMixture and GaussianMixtureModel
*/
private[clustering] trait GaussianMixtureParams extends Params with HasMaxIter with HasFeaturesCol
with HasSeed with HasPredictionCol with HasProbabilityCol with HasTol {
/**
* Number of independent Gaussians in the mixture model. Must be greater than 1. Default: 2.
*
* @group param
*/
@Since("2.0.0")
final val k = new IntParam(this, "k", "Number of independent Gaussians in the mixture model. " +
"Must be > 1.", ParamValidators.gt(1))
/** @group getParam */
@Since("2.0.0")
def getK: Int = $(k)
/**
* Validates and transforms the input schema.
*
* @param schema input schema
* @return output schema
*/
protected def validateAndTransformSchema(schema: StructType): StructType = {
SchemaUtils.validateVectorCompatibleColumn(schema, getFeaturesCol)
val schemaWithPredictionCol = SchemaUtils.appendColumn(schema, $(predictionCol), IntegerType)
SchemaUtils.appendColumn(schemaWithPredictionCol, $(probabilityCol), new VectorUDT)
}
}
/**
* Multivariate Gaussian Mixture Model (GMM) consisting of k Gaussians, where points
* are drawn from each Gaussian i with probability weights(i).
*
* @param weights Weight for each Gaussian distribution in the mixture.
* This is a multinomial probability distribution over the k Gaussians,
* where weights(i) is the weight for Gaussian i, and weights sum to 1.
* @param gaussians Array of `MultivariateGaussian` where gaussians(i) represents
* the Multivariate Gaussian (Normal) Distribution for Gaussian i
*/
@Since("2.0.0")
class GaussianMixtureModel private[ml] (
@Since("2.0.0") override val uid: String,
@Since("2.0.0") val weights: Array[Double],
@Since("2.0.0") val gaussians: Array[MultivariateGaussian])
extends Model[GaussianMixtureModel] with GaussianMixtureParams with MLWritable {
/** @group setParam */
@Since("2.1.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("2.1.0")
def setPredictionCol(value: String): this.type = set(predictionCol, value)
/** @group setParam */
@Since("2.1.0")
def setProbabilityCol(value: String): this.type = set(probabilityCol, value)
@Since("2.0.0")
override def copy(extra: ParamMap): GaussianMixtureModel = {
val copied = copyValues(new GaussianMixtureModel(uid, weights, gaussians), extra)
copied.setSummary(trainingSummary).setParent(this.parent)
}
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
val predUDF = udf((vector: Vector) => predict(vector))
val probUDF = udf((vector: Vector) => predictProbability(vector))
dataset
.withColumn($(predictionCol), predUDF(DatasetUtils.columnToVector(dataset, getFeaturesCol)))
.withColumn($(probabilityCol), probUDF(DatasetUtils.columnToVector(dataset, getFeaturesCol)))
}
@Since("2.0.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
private[clustering] def predict(features: Vector): Int = {
val r = predictProbability(features)
r.argmax
}
private[clustering] def predictProbability(features: Vector): Vector = {
val probs: Array[Double] =
GaussianMixtureModel.computeProbabilities(features.asBreeze.toDenseVector, gaussians, weights)
Vectors.dense(probs)
}
/**
* Retrieve Gaussian distributions as a DataFrame.
* Each row represents a Gaussian Distribution.
* Two columns are defined: mean and cov.
* Schema:
* {{{
* root
* |-- mean: vector (nullable = true)
* |-- cov: matrix (nullable = true)
* }}}
*/
@Since("2.0.0")
def gaussiansDF: DataFrame = {
val modelGaussians = gaussians.map { gaussian =>
(OldVectors.fromML(gaussian.mean), OldMatrices.fromML(gaussian.cov))
}
SparkSession.builder().getOrCreate().createDataFrame(modelGaussians).toDF("mean", "cov")
}
/**
* Returns a [[org.apache.spark.ml.util.MLWriter]] instance for this ML instance.
*
* For [[GaussianMixtureModel]], this does NOT currently save the training [[summary]].
* An option to save [[summary]] may be added in the future.
*
*/
@Since("2.0.0")
override def write: MLWriter = new GaussianMixtureModel.GaussianMixtureModelWriter(this)
private var trainingSummary: Option[GaussianMixtureSummary] = None
private[clustering] def setSummary(summary: Option[GaussianMixtureSummary]): this.type = {
this.trainingSummary = summary
this
}
/**
* Return true if there exists summary of model.
*/
@Since("2.0.0")
def hasSummary: Boolean = trainingSummary.nonEmpty
/**
* Gets summary of model on training set. An exception is
* thrown if `trainingSummary == None`.
*/
@Since("2.0.0")
def summary: GaussianMixtureSummary = trainingSummary.getOrElse {
throw new RuntimeException(
s"No training summary available for the ${this.getClass.getSimpleName}")
}
}
@Since("2.0.0")
object GaussianMixtureModel extends MLReadable[GaussianMixtureModel] {
@Since("2.0.0")
override def read: MLReader[GaussianMixtureModel] = new GaussianMixtureModelReader
@Since("2.0.0")
override def load(path: String): GaussianMixtureModel = super.load(path)
/** [[MLWriter]] instance for [[GaussianMixtureModel]] */
private[GaussianMixtureModel] class GaussianMixtureModelWriter(
instance: GaussianMixtureModel) extends MLWriter {
private case class Data(weights: Array[Double], mus: Array[OldVector], sigmas: Array[OldMatrix])
override protected def saveImpl(path: String): Unit = {
// Save metadata and Params
DefaultParamsWriter.saveMetadata(instance, path, sc)
// Save model data: weights and gaussians
val weights = instance.weights
val gaussians = instance.gaussians
val mus = gaussians.map(g => OldVectors.fromML(g.mean))
val sigmas = gaussians.map(c => OldMatrices.fromML(c.cov))
val data = Data(weights, mus, sigmas)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class GaussianMixtureModelReader extends MLReader[GaussianMixtureModel] {
/** Checked against metadata when loading model */
private val className = classOf[GaussianMixtureModel].getName
override def load(path: String): GaussianMixtureModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val row = sparkSession.read.parquet(dataPath).select("weights", "mus", "sigmas").head()
val weights = row.getSeq[Double](0).toArray
val mus = row.getSeq[OldVector](1).toArray
val sigmas = row.getSeq[OldMatrix](2).toArray
require(mus.length == sigmas.length, "Length of Mu and Sigma array must match")
require(mus.length == weights.length, "Length of weight and Gaussian array must match")
val gaussians = mus.zip(sigmas).map {
case (mu, sigma) =>
new MultivariateGaussian(mu.asML, sigma.asML)
}
val model = new GaussianMixtureModel(metadata.uid, weights, gaussians)
metadata.getAndSetParams(model)
model
}
}
/**
* Compute the probability (partial assignment) for each cluster for the given data point.
*
* @param features Data point
* @param dists Gaussians for model
* @param weights Weights for each Gaussian
* @return Probability (partial assignment) for each of the k clusters
*/
private[clustering]
def computeProbabilities(
features: BDV[Double],
dists: Array[MultivariateGaussian],
weights: Array[Double]): Array[Double] = {
val p = weights.zip(dists).map {
case (weight, dist) => EPSILON + weight * dist.pdf(features)
}
val pSum = p.sum
var i = 0
while (i < weights.length) {
p(i) /= pSum
i += 1
}
p
}
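  /*
   Worked example (illustrative, not in the original source): with two
   clusters, weights (0.3, 0.7) and densities (2.0, 1.0) at a point,
     p = (0.3 * 2.0, 0.7 * 1.0) = (0.6, 0.7), pSum = 1.3
   so the responsibilities are approximately (0.46, 0.54); the EPSILON term
   only guards against all densities underflowing to zero.
  */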
}
/**
* Gaussian Mixture clustering.
*
* This class performs expectation maximization for multivariate Gaussian
* Mixture Models (GMMs). A GMM represents a composite distribution of
* independent Gaussian distributions with associated "mixing" weights
* specifying each's contribution to the composite.
*
* Given a set of sample points, this class will maximize the log-likelihood
* for a mixture of k Gaussians, iterating until the log-likelihood changes by
* less than convergenceTol, or until it has reached the max number of iterations.
* While this process is generally guaranteed to converge, it is not guaranteed
* to find a global optimum.
*
* @note This algorithm is limited in its number of features since it requires storing a covariance
* matrix which has size quadratic in the number of features. Even when the number of features does
* not exceed this limit, this algorithm may perform poorly on high-dimensional data.
* This is due to high-dimensional data (a) making it difficult to cluster at all (based
* on statistical/theoretical arguments) and (b) numerical issues with Gaussian distributions.
*/
@Since("2.0.0")
class GaussianMixture @Since("2.0.0") (
@Since("2.0.0") override val uid: String)
extends Estimator[GaussianMixtureModel] with GaussianMixtureParams with DefaultParamsWritable {
setDefault(
k -> 2,
maxIter -> 100,
tol -> 0.01)
@Since("2.0.0")
override def copy(extra: ParamMap): GaussianMixture = defaultCopy(extra)
@Since("2.0.0")
def this() = this(Identifiable.randomUID("GaussianMixture"))
/** @group setParam */
@Since("2.0.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("2.0.0")
def setPredictionCol(value: String): this.type = set(predictionCol, value)
/** @group setParam */
@Since("2.0.0")
def setProbabilityCol(value: String): this.type = set(probabilityCol, value)
/** @group setParam */
@Since("2.0.0")
def setK(value: Int): this.type = set(k, value)
/** @group setParam */
@Since("2.0.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("2.0.0")
def setTol(value: Double): this.type = set(tol, value)
/** @group setParam */
@Since("2.0.0")
def setSeed(value: Long): this.type = set(seed, value)
/**
* Number of samples per cluster to use when initializing Gaussians.
*/
private val numSamples = 5
@Since("2.0.0")
override def fit(dataset: Dataset[_]): GaussianMixtureModel = {
transformSchema(dataset.schema, logging = true)
val sc = dataset.sparkSession.sparkContext
val numClusters = $(k)
val instances: RDD[Vector] = dataset
.select(DatasetUtils.columnToVector(dataset, getFeaturesCol)).rdd.map {
case Row(features: Vector) => features
}.cache()
// Extract the number of features.
val numFeatures = instances.first().size
require(numFeatures < GaussianMixture.MAX_NUM_FEATURES, s"GaussianMixture cannot handle more " +
s"than ${GaussianMixture.MAX_NUM_FEATURES} features because the size of the covariance" +
s" matrix is quadratic in the number of features.")
val instr = Instrumentation.create(this, dataset)
instr.logParams(featuresCol, predictionCol, probabilityCol, k, maxIter, seed, tol)
instr.logNumFeatures(numFeatures)
val shouldDistributeGaussians = GaussianMixture.shouldDistributeGaussians(
numClusters, numFeatures)
// TODO: SPARK-15785 Support users supplied initial GMM.
val (weights, gaussians) = initRandom(instances, numClusters, numFeatures)
var logLikelihood = Double.MinValue
var logLikelihoodPrev = 0.0
var iter = 0
while (iter < $(maxIter) && math.abs(logLikelihood - logLikelihoodPrev) > $(tol)) {
val bcWeights = instances.sparkContext.broadcast(weights)
val bcGaussians = instances.sparkContext.broadcast(gaussians)
// aggregate the cluster contribution for all sample points
val sums = instances.treeAggregate(
new ExpectationAggregator(numFeatures, bcWeights, bcGaussians))(
seqOp = (c, v) => (c, v) match {
case (aggregator, instance) => aggregator.add(instance)
},
combOp = (c1, c2) => (c1, c2) match {
case (aggregator1, aggregator2) => aggregator1.merge(aggregator2)
})
bcWeights.destroy(blocking = false)
bcGaussians.destroy(blocking = false)
      /*
       Create new distributions based on the partial assignments
       (often referred to as the "M" step in the literature)
      */
val sumWeights = sums.weights.sum
if (shouldDistributeGaussians) {
val numPartitions = math.min(numClusters, 1024)
val tuples = Seq.tabulate(numClusters) { i =>
(sums.means(i), sums.covs(i), sums.weights(i))
}
val (ws, gs) = sc.parallelize(tuples, numPartitions).map { case (mean, cov, weight) =>
GaussianMixture.updateWeightsAndGaussians(mean, cov, weight, sumWeights)
}.collect().unzip
Array.copy(ws, 0, weights, 0, ws.length)
Array.copy(gs, 0, gaussians, 0, gs.length)
} else {
var i = 0
while (i < numClusters) {
val (weight, gaussian) = GaussianMixture.updateWeightsAndGaussians(
sums.means(i), sums.covs(i), sums.weights(i), sumWeights)
weights(i) = weight
gaussians(i) = gaussian
i += 1
}
}
logLikelihoodPrev = logLikelihood // current becomes previous
logLikelihood = sums.logLikelihood // this is the freshly computed log-likelihood
iter += 1
}
val gaussianDists = gaussians.map { case (mean, covVec) =>
val cov = GaussianMixture.unpackUpperTriangularMatrix(numFeatures, covVec.values)
new MultivariateGaussian(mean, cov)
}
val model = copyValues(new GaussianMixtureModel(uid, weights, gaussianDists)).setParent(this)
val summary = new GaussianMixtureSummary(model.transform(dataset),
$(predictionCol), $(probabilityCol), $(featuresCol), $(k), logLikelihood)
model.setSummary(Some(summary))
instr.logNamedValue("logLikelihood", logLikelihood)
instr.logNamedValue("clusterSizes", summary.clusterSizes)
instr.logSuccess(model)
model
}
@Since("2.0.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
/**
* Initialize weights and corresponding gaussian distributions at random.
*
* We start with uniform weights, a random mean from the data, and diagonal covariance matrices
* using component variances derived from the samples.
*
* @param instances The training instances.
* @param numClusters The number of clusters.
* @param numFeatures The number of features of training instance.
* @return The initialized weights and corresponding gaussian distributions. Note the
* covariance matrix of multivariate gaussian distribution is symmetric and
* we only save the upper triangular part as a dense vector (column major).
*/
private def initRandom(
instances: RDD[Vector],
numClusters: Int,
numFeatures: Int): (Array[Double], Array[(DenseVector, DenseVector)]) = {
val samples = instances.takeSample(withReplacement = true, numClusters * numSamples, $(seed))
val weights: Array[Double] = Array.fill(numClusters)(1.0 / numClusters)
val gaussians: Array[(DenseVector, DenseVector)] = Array.tabulate(numClusters) { i =>
val slice = samples.view(i * numSamples, (i + 1) * numSamples)
val mean = {
val v = new DenseVector(new Array[Double](numFeatures))
var i = 0
while (i < numSamples) {
BLAS.axpy(1.0, slice(i), v)
i += 1
}
BLAS.scal(1.0 / numSamples, v)
v
}
/*
Construct matrix where diagonal entries are element-wise
variance of input vectors (computes biased variance).
Since the covariance matrix of multivariate gaussian distribution is symmetric,
only the upper triangular part of the matrix (column major) will be saved as
a dense vector in order to reduce the shuffled data size.
*/
val cov = {
val ss = new DenseVector(new Array[Double](numFeatures)).asBreeze
slice.foreach(xi => ss += (xi.asBreeze - mean.asBreeze) ^:^ 2.0)
val diagVec = Vectors.fromBreeze(ss)
BLAS.scal(1.0 / numSamples, diagVec)
val covVec = new DenseVector(Array.fill[Double](
numFeatures * (numFeatures + 1) / 2)(0.0))
diagVec.toArray.zipWithIndex.foreach { case (v: Double, i: Int) =>
covVec.values(i + i * (i + 1) / 2) = v
}
covVec
}
(mean, cov)
}
(weights, gaussians)
}
}
@Since("2.0.0")
object GaussianMixture extends DefaultParamsReadable[GaussianMixture] {
/** Limit number of features such that numFeatures^2^ < Int.MaxValue */
private[clustering] val MAX_NUM_FEATURES = math.sqrt(Int.MaxValue).toInt
@Since("2.0.0")
override def load(path: String): GaussianMixture = super.load(path)
/**
* Heuristic to distribute the computation of the [[MultivariateGaussian]]s, approximately when
* numFeatures > 25 except for when numClusters is very small.
*
* @param numClusters Number of clusters
* @param numFeatures Number of features
*/
private[clustering] def shouldDistributeGaussians(
numClusters: Int,
numFeatures: Int): Boolean = {
((numClusters - 1.0) / numClusters) * numFeatures > 25.0
}
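  /*
   Illustrative check (not in the original source): with numClusters = 5,
   numFeatures = 30 gives (4/5) * 30 = 24 <= 25, so the M-step stays local;
   numFeatures = 40 gives 32 > 25, so the Gaussian updates are distributed.
  */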
/**
* Convert an n * (n + 1) / 2 dimension array representing the upper triangular part of a matrix
* into an n * n array representing the full symmetric matrix (column major).
*
* @param n The order of the n by n matrix.
* @param triangularValues The upper triangular part of the matrix packed in an array
* (column major).
* @return A dense matrix which represents the symmetric matrix in column major.
*/
private[clustering] def unpackUpperTriangularMatrix(
n: Int,
triangularValues: Array[Double]): DenseMatrix = {
val symmetricValues = new Array[Double](n * n)
var r = 0
var i = 0
while (i < n) {
var j = 0
while (j <= i) {
symmetricValues(i * n + j) = triangularValues(r)
symmetricValues(j * n + i) = triangularValues(r)
r += 1
j += 1
}
i += 1
}
new DenseMatrix(n, n, symmetricValues)
}
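  /*
   Worked example (illustrative, not in the original source): for n = 3 the
   packed upper-triangular values (column major) are
     Array(a11, a12, a22, a13, a23, a33)
   and unpacking yields the full symmetric matrix (column major)
     a11 a12 a13
     a12 a22 a23
     a13 a23 a33
  */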
/**
* Update the weight, mean and covariance of gaussian distribution.
*
* @param mean The mean of the gaussian distribution.
* @param cov The covariance matrix of the gaussian distribution. Note we only
* save the upper triangular part as a dense vector (column major).
* @param weight The weight of the gaussian distribution.
* @param sumWeights The sum of weights of all clusters.
* @return The updated weight, mean and covariance.
*/
private[clustering] def updateWeightsAndGaussians(
mean: DenseVector,
cov: DenseVector,
weight: Double,
sumWeights: Double): (Double, (DenseVector, DenseVector)) = {
BLAS.scal(1.0 / weight, mean)
BLAS.spr(-weight, mean, cov)
BLAS.scal(1.0 / weight, cov)
val newWeight = weight / sumWeights
val newGaussian = (mean, cov)
(newWeight, newGaussian)
}
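  /*
   Derivation sketch (illustrative, not in the original source): on entry
   `mean` holds sum_x gamma_x * x and `cov` the packed sum_x gamma_x * x x^T,
   with `weight` = sum_x gamma_x = W. The three BLAS calls compute
     mu    = (sum_x gamma_x * x) / W
     Sigma = (sum_x gamma_x * x x^T - W * mu mu^T) / W = E[x x^T] - mu mu^T
   which is the standard weighted M-step update for one Gaussian component.
  */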
}
/**
* ExpectationAggregator computes the partial expectation results.
*
* @param numFeatures The number of features.
* @param bcWeights The broadcast weights for each Gaussian distribution in the mixture.
* @param bcGaussians The broadcast array of Multivariate Gaussian (Normal) Distribution
* in the mixture. Note only upper triangular part of the covariance
* matrix of each distribution is stored as dense vector (column major)
* in order to reduce shuffled data size.
*/
private class ExpectationAggregator(
numFeatures: Int,
bcWeights: Broadcast[Array[Double]],
bcGaussians: Broadcast[Array[(DenseVector, DenseVector)]]) extends Serializable {
private val k: Int = bcWeights.value.length
private var totalCnt: Long = 0L
private var newLogLikelihood: Double = 0.0
private lazy val newWeights: Array[Double] = new Array[Double](k)
private lazy val newMeans: Array[DenseVector] = Array.fill(k)(
new DenseVector(Array.fill[Double](numFeatures)(0.0)))
private lazy val newCovs: Array[DenseVector] = Array.fill(k)(
new DenseVector(Array.fill[Double](numFeatures * (numFeatures + 1) / 2)(0.0)))
@transient private lazy val oldGaussians = {
bcGaussians.value.map { case (mean, covVec) =>
val cov = GaussianMixture.unpackUpperTriangularMatrix(numFeatures, covVec.values)
new MultivariateGaussian(mean, cov)
}
}
def count: Long = totalCnt
def logLikelihood: Double = newLogLikelihood
def weights: Array[Double] = newWeights
def means: Array[DenseVector] = newMeans
def covs: Array[DenseVector] = newCovs
/**
   * Add a new training instance to this ExpectationAggregator, update the weights,
   * means and covariances for each distribution, and update the log likelihood.
*
* @param instance The instance of data point to be added.
* @return This ExpectationAggregator object.
*/
def add(instance: Vector): this.type = {
val localWeights = bcWeights.value
val localOldGaussians = oldGaussians
val prob = new Array[Double](k)
var probSum = 0.0
var i = 0
while (i < k) {
val p = EPSILON + localWeights(i) * localOldGaussians(i).pdf(instance)
prob(i) = p
probSum += p
i += 1
}
newLogLikelihood += math.log(probSum)
val localNewWeights = newWeights
val localNewMeans = newMeans
val localNewCovs = newCovs
i = 0
while (i < k) {
prob(i) /= probSum
localNewWeights(i) += prob(i)
BLAS.axpy(prob(i), instance, localNewMeans(i))
BLAS.spr(prob(i), instance, localNewCovs(i))
i += 1
}
totalCnt += 1
this
}
/**
* Merge another ExpectationAggregator, update the weights, means and covariances
   * for each distribution, and update the log likelihood.
   * (Note that this is in-place merging; as a result, `this` object will be modified.)
*
* @param other The other ExpectationAggregator to be merged.
* @return This ExpectationAggregator object.
*/
def merge(other: ExpectationAggregator): this.type = {
if (other.count != 0) {
totalCnt += other.totalCnt
val localThisNewWeights = this.newWeights
val localOtherNewWeights = other.newWeights
val localThisNewMeans = this.newMeans
val localOtherNewMeans = other.newMeans
val localThisNewCovs = this.newCovs
val localOtherNewCovs = other.newCovs
var i = 0
while (i < k) {
localThisNewWeights(i) += localOtherNewWeights(i)
BLAS.axpy(1.0, localOtherNewMeans(i), localThisNewMeans(i))
BLAS.axpy(1.0, localOtherNewCovs(i), localThisNewCovs(i))
i += 1
}
newLogLikelihood += other.newLogLikelihood
}
this
}
}
/**
* :: Experimental ::
* Summary of GaussianMixture.
*
* @param predictions `DataFrame` produced by `GaussianMixtureModel.transform()`.
* @param predictionCol Name for column of predicted clusters in `predictions`.
* @param probabilityCol Name for column of predicted probability of each cluster
* in `predictions`.
* @param featuresCol Name for column of features in `predictions`.
* @param k Number of clusters.
* @param logLikelihood Total log-likelihood for this model on the given data.
*/
@Since("2.0.0")
@Experimental
class GaussianMixtureSummary private[clustering] (
predictions: DataFrame,
predictionCol: String,
@Since("2.0.0") val probabilityCol: String,
featuresCol: String,
k: Int,
@Since("2.2.0") val logLikelihood: Double)
extends ClusteringSummary(predictions, predictionCol, featuresCol, k) {
/**
* Probability of each cluster.
*/
@Since("2.0.0")
@transient lazy val probability: DataFrame = predictions.select(probabilityCol)
}
| bravo-zhang/spark | mllib/src/main/scala/org/apache/spark/ml/clustering/GaussianMixture.scala | Scala | apache-2.0 | 25,917 |
package com.sksamuel.elastic4s.http.get
object MetaDataFields {
val fields = Set(
"_id",
"_type",
"_index",
"_uid"
)
}
| Tecsisa/elastic4s | elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/get/MetaDataFields.scala | Scala | apache-2.0 | 140 |
package com.mattrjacobs.rxscala
import rx.lang.scala.Observable
object Producer {
def stream(): Observable[String] = {
val stringList = (1 to 100).map(x => "a")
Observable(stringList: _*)
}
}
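// Hedged usage sketch (illustrative only): subscribing consumes the stream,
// here printing one hundred "a" strings.
object ProducerExample {
  def demo(): Unit = Producer.stream().subscribe(s => println(s))
}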
| mattrjacobs/RxScalaDemo | src/main/scala/demo/SampleProducer.scala | Scala | apache-2.0 | 206 |
package org.jetbrains.plugins.scala
package annotator
import com.intellij.lang.annotation.AnnotationHolder
import com.intellij.psi.{PsiClass, PsiField, PsiMethod}
import org.jetbrains.plugins.scala.annotator.AnnotatorUtils.registerTypeMismatchError
import org.jetbrains.plugins.scala.codeInspection.varCouldBeValInspection.ValToVarQuickFix
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScValue, ScVariable}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.resolve.processor.DynamicResolveProcessor
import org.jetbrains.plugins.scala.project.ProjectContext
/**
* Pavel.Fatin, 31.05.2010
*/
trait AssignmentAnnotator {
def annotateAssignment(assignment: ScAssignStmt, holder: AnnotationHolder, advancedHighlighting: Boolean) {
implicit val ctx: ProjectContext = assignment
val left = assignment.getLExpression
val right = assignment.getRExpression
assignment.getLExpression match {
case _: ScMethodCall =>
case ref: ScReferenceExpression =>
ref.bind() match {
case Some(r) if r.isDynamic && r.name == DynamicResolveProcessor.UPDATE_DYNAMIC => //ignore
case Some(r) if !r.isNamedParameter =>
def checkVariable() {
left.`type`().foreach { lType =>
right.foreach { expression =>
expression.getTypeAfterImplicitConversion().tr.foreach { rType =>
if(!ScalaPsiUtil.isUnderscoreEq(assignment, rType)) {
registerTypeMismatchError(rType, lType, holder, expression)
}
}
}
}
}
ScalaPsiUtil.nameContext(r.element) match {
case _: ScVariable =>
if (!advancedHighlighting) return
checkVariable()
case c: ScClassParameter if c.isVar =>
if (!advancedHighlighting) return
checkVariable()
case f: PsiField if !f.hasModifierProperty("final") =>
if (!advancedHighlighting) return
checkVariable()
case fun: ScFunction if ScalaPsiUtil.isViableForAssignmentFunction(fun) =>
if (!advancedHighlighting) return
assignment.resolveAssignment match {
case Some(ra) =>
ra.problems.foreach {
case TypeMismatch(expression, expectedType) =>
expression.`type`().foreach {
registerTypeMismatchError(_, expectedType, holder, expression)
}
case MissedValueParameter(_) => // simultaneously handled above
case UnresolvedParameter(_) => // don't show function inapplicability, unresolved
case WrongTypeParameterInferred => //todo: ?
case ExpectedTypeMismatch => // will be reported later
case _ => holder.createErrorAnnotation(assignment, "Wrong right assignment side")
}
case _ => holder.createErrorAnnotation(assignment, "Reassignment to val")
}
case _: ScFunction => holder.createErrorAnnotation(assignment, "Reassignment to val")
case method: PsiMethod if method.getParameterList.getParametersCount == 0 =>
method.containingClass match {
case c: PsiClass if c.isAnnotationType => //do nothing
case _ => holder.createErrorAnnotation(assignment, "Reassignment to val")
}
case _: ScValue =>
val annotation = holder.createErrorAnnotation(assignment, "Reassignment to val")
annotation.registerFix(new ValToVarQuickFix(ScalaPsiUtil.nameContext(r.element).asInstanceOf[ScValue]))
case _ => holder.createErrorAnnotation(assignment, "Reassignment to val")
}
case _ =>
}
case _ =>
}
}
}
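
// Sketches of code this annotator reacts to (illustrative snippets, not plugin sources):
//
//   val x = 1
//   x = 2          // "Reassignment to val", with a quick-fix offering val -> var
//
//   var y = 1
//   y = "a"        // type mismatch between String and Int, reported only when
//                  // advanced highlighting is enabled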
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/AssignmentAnnotator.scala | Scala | apache-2.0 | 4,291 |
package sync
import java.io._
import scala.collection.mutable.ArrayBuffer
object Constants {
val scalaVersion = "2.11"
val clientoptjs = "client-opt.js"
val clientjsdepsminjs = "client-jsdeps.min.js"
val targetname = "chessapp"
def scaladir = s"scala-$scalaVersion"
}
case class MyPath(parts: String*) {
import Utils._
def toname: String = {
if (parts.length <= 0) return "."
parts.mkString(sep)
}
def topath: String = toname + sep
def getname: String = {
if (parts.length <= 0) return ""
parts.reverse.head
}
def length = parts.length
def -(mp: MyPath): MyPath = {
for (i <- 0 to mp.length - 1) {
if (parts(i) != mp.parts(i)) return MyPath(parts: _*)
}
MyPath(parts.slice(mp.length, parts.length): _*)
}
def +(mp: MyPath): MyPath = {
val allparts = mp.parts.toList ::: parts.toList
MyPath(allparts.filter(!specialdir(_)): _*)
}
def normalized: MyPath = {
val newparts = ArrayBuffer[String]()
for (p <- parts) {
if ((p == parentdir) && (newparts.length > 0)) newparts.remove(newparts.length - 1) else newparts += p
}
MyPath(newparts: _*)
}
}
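
// Path arithmetic at a glance (informal equalities):
//
//   MyPath("a", "b", "c") - MyPath("a")    // MyPath("b", "c"): strips the given prefix
//   MyPath("b", "c") + MyPath("x", "y")    // MyPath("x", "y", "b", "c"): mp becomes the prefix
//   MyPath("a", "..", "b").normalized      // MyPath("b"): ".." swallows the preceding part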
case class MyDirEntry(
mypath: MyPath = MyPath(),
isdir: Boolean = false,
lastmod: Long = 0,
exists: Boolean = false
) {
import Utils._
def isfile = !isdir
def toname = mypath.toname
def topath = mypath.topath
def getname: String = {
if (!exists) return ""
mypath.getname
}
def tofile = new File(toname)
def listentries: MyDirEntries = {
val md = MyDirEntries()
if ((!exists) || (!isdir)) return md
val files = tofile.listFiles
for (f <- files) md += MyDirEntryFromPath(f.getAbsolutePath)
md
}
def reportPrintable: String = s"""$toname $isdir $lastmod"""
def -(mp: MyPath): MyDirEntry = this.copy(mypath = mypath - mp)
def +(mp: MyPath): MyDirEntry = MyDirEntryFromMyPath(mypath + mp)
def plus(mp: MyPath): MyDirEntry = this.copy(mypath = mypath + mp)
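  // Note the asymmetry: `+` re-resolves the combined path on disk via
  // MyDirEntryFromMyPath (so exists/lastmod describe the destination), while `plus`
  // only rewrites the stored path and keeps this entry's metadata untouched.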
}
case class SyncItem(
from: MyDirEntry = MyDirEntry(),
to: MyDirEntry = MyDirEntry()
)
case class SyncItems() extends ArrayBuffer[SyncItem] {
import Utils._
def addDir(setfrom: MyPath, setto: MyPath, filterrecdirsfunc: FDEF = Some((e: MyDirEntry) => (e.getname != "target"))) {
val from = MyDirEntryFromMyPath(setfrom).mypath
val to = MyDirEntryFromMyPath(setto).mypath.normalized
val entries = Utils.collect(from, filterrecdirsfunc = filterrecdirsfunc)
val dirs = entries.getdirs.sortbycomplexity
for (r <- dirs) {
val d = (r - from) + to
if (!d.exists) {
this += SyncItem(r, (r - from).plus(to))
}
}
val files = entries.getfiles.sortbycomplexity
for (r <- files) {
val d = (r - from) + to
if ((!d.exists) || (r.lastmod > d.lastmod)) {
this += SyncItem(r, (r - from).plus(to))
}
}
}
def addFile(setfrom: MyPath, setto: MyPath, unconditional: Boolean = false): Boolean = {
val from = MyDirEntryFromMyPath(setfrom).mypath
val to = MyDirEntryFromMyPath(setto).mypath.normalized
val fromde = MyDirEntryFromMyPath(from)
val tode = MyDirEntryFromMyPath(to)
if (!fromde.exists) {
println("error, from file does not exist " + fromde.mypath.toname)
return false
} else if ((!tode.exists) || (fromde.lastmod > tode.lastmod) || unconditional) {
this += SyncItem(fromde, tode)
return true
}
false
}
  def reportPrintable: String = (for (item <- this) yield item.from.reportPrintable + "\n -> " + item.to.reportPrintable).mkString("\n")
def reportWindowsBatch: String = (for (item <- this) yield {
if (item.from.isdir) quotedcommand("mkdir", item.to.mypath.toname) else
quotedcommand("copy", item.from.mypath.toname, item.to.mypath.toname)
  }).mkString("\n")
}
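
// Typical flow (paths are illustrative): queue up directory and file copies, then
// render them as a Windows batch script. Files are copied when missing or when the
// source is newer than the destination.
//
//   val items = SyncItems()
//   items.addDir(MyPath("server", "app"), MyPath("..", "target", "app"))
//   items.addFile(MyPath("readme.txt"), MyPath("..", "target", "readme.txt"))
//   println(items.reportWindowsBatch)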
case class MyDirEntries() extends ArrayBuffer[MyDirEntry] {
def myfilter(ff: (MyDirEntry) => Boolean): MyDirEntries = {
val md = MyDirEntries()
for (e <- this) {
if (ff(e)) md += e
}
md
}
def getdirs = myfilter((e: MyDirEntry) => e.isdir)
def getfiles = myfilter((e: MyDirEntry) => e.isfile)
def append(md: MyDirEntries) {
for (e <- md) this += e
}
  def reportPrintable: String = (for (e <- this) yield e.reportPrintable).mkString("\n")
def frombuffer(buff: ArrayBuffer[MyDirEntry]): MyDirEntries = {
val md = MyDirEntries()
for (e <- buff) md += e
md
}
def sortbycomplexity: MyDirEntries = frombuffer(this.sortWith((a, b) => a.mypath.length < b.mypath.length))
def -(mp: MyPath): MyDirEntries = {
val md = MyDirEntries()
for (e <- this) md += (e - mp)
md
}
def +(mp: MyPath): MyDirEntries = {
val md = MyDirEntries()
for (e <- this) md += (e + mp)
md
}
}
object Utils {
val sep = File.separator
  // Regex-escaped separator: String.split takes a regex, so a literal "\" separator
  // must be escaped as "\\".
  val regexsep = "\\" + sep
  val regexpesc = "\\"
val thisdir = "."
val parentdir = ".."
  // ".." is deliberately not treated as special here; MyPath.normalized collapses it.
  def specialdir(d: String) = d == thisdir
  val quote = """""""   // triple-quote trick yielding a single double-quote character
def quotedcommand(command: String, args: String*): String = command + " " + (for (arg <- args) yield quote + arg + quote).mkString(" ")
def WriteStringToFile(path: String, content: String) {
org.apache.commons.io.FileUtils.writeStringToFile(
new java.io.File(path),
content,
null.asInstanceOf[String]
)
}
def ReadFileToString(path: String): String = {
val f = new java.io.File(path)
if (!f.exists()) return null
org.apache.commons.io.FileUtils.readFileToString(
f,
null.asInstanceOf[String]
)
}
def MyDirEntryFromMyPath(mypath: MyPath): MyDirEntry = {
val f = new File(mypath.toname)
if (!f.exists) return MyDirEntry()
MyDirEntry(
mypath = MyPathFromPath(f.getAbsolutePath),
isdir = f.isDirectory,
lastmod = f.lastModified,
exists = true
)
}
def MyPathFromPath(path: String): MyPath = {
val parts = path.split(regexsep).filter(!specialdir(_))
if (parts.length <= 1) return MyPath(parts: _*)
val partsrev = parts.reverse
if (partsrev.head == "") return MyPath(partsrev.tail.reverse: _*)
MyPath(parts: _*)
}
def MyDirEntryFromPath(path: String) = MyDirEntryFromMyPath(MyPathFromPath(path))
type FDEF = Option[((MyDirEntry) => Boolean)]
def collect(
mypath: MyPath,
entries: MyDirEntries = MyDirEntries(),
recursive: Boolean = true,
filterrecdirsfunc: FDEF = None
): MyDirEntries = {
val de = MyDirEntryFromMyPath(mypath)
if (!(de.exists && de.isdir)) return entries
entries += de
val le = de.listentries
entries.append(le.getfiles)
if (recursive) for (d <- le.getdirs) {
var ok = true
if (!filterrecdirsfunc.isEmpty) ok = filterrecdirsfunc.get(d)
if (ok) collect(mypath = d.mypath, entries = entries, recursive = recursive, filterrecdirsfunc = filterrecdirsfunc)
}
entries
}
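  // Example (illustrative): gather every entry under "server", skipping any
  // directory named "target" during recursion.
  //
  //   val entries = collect(
  //     MyPathFromPath("server"),
  //     filterrecdirsfunc = Some((d: MyDirEntry) => d.getname != "target")
  //   )
  //   println(entries.getfiles.reportPrintable)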
def change_version(
local: MyPath,
remote: MyPath,
jsnames: String*
) {
val localde = MyDirEntryFromMyPath(local)
val remotede = MyDirEntryFromMyPath(remote)
var content = ReadFileToString(if (remotede.exists) remotede.toname else localde.toname)
for (jsname <- jsnames) {
val parts = content.split(s"$jsname$regexpesc?v")
val quote = """""""
val parts2 = parts(1).split(quote)
val version = parts2(0).toInt
val newversion = version + 1
content = parts(0) + s"$jsname?v" + newversion + quote + parts2.tail.mkString(quote)
}
WriteStringToFile(localde.toname, content)
}
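  // Effect of change_version (illustrative HTML, the actual page markup is assumed):
  // it bumps the cache-busting version after each synced script name so browsers
  // re-fetch the new build, e.g.
  //
  //   src="assets/javascripts/client-opt.js?v3"   becomes
  //   src="assets/javascripts/client-opt.js?v4"
  //
  // Note that the implementation assumes each script name occurs exactly once in the file.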
}
object Sync extends App {
import Utils._
import Constants._
val commitname = if (args.length > 0) args(0) else "Test"
val syncitems = SyncItems()
syncitems.addDir(MyPath("server", "app"), MyPath("..", targetname, "app"))
syncitems.addDir(MyPath("server", "public"), MyPath("..", targetname, "public"))
syncitems.addDir(MyPath("server", "conf"), MyPath("..", targetname, "conf"))
syncitems.addDir(MyPath("shared", "src", "main", "scala", "shared"), MyPath("..", targetname, "app", "shared"))
val addopt = syncitems.addFile(
MyPath("client", "target", scaladir, clientoptjs),
MyPath("..", targetname, "public", "javascripts", clientoptjs)
)
val adddeps = syncitems.addFile(
MyPath("client", "target", scaladir, clientjsdepsminjs),
MyPath("..", targetname, "public", "javascripts", clientjsdepsminjs)
)
if (addopt || adddeps) {
change_version(
MyPath("chess.scala.html"),
MyPath("..", "chessapp", "app", "views", "chess.scala.html"),
clientjsdepsminjs,
clientoptjs
)
}
syncitems.addFile(MyPath("chess.scala.html"), MyPath("..", targetname, "app", "views"))
val sbat = syncitems.reportWindowsBatch
val bat = s"""
|$sbat
|pause
|call pre.bat
|git add -A .
|git commit -m "$commitname"
|git push origin master
    |cd ..\\$targetname
|call pre.bat
|git add -A .
|git commit -m "$commitname"
|git push origin master
|pause
""".stripMargin
println(bat)
WriteStringToFile("c.bat", bat)
} | serversideapps/silhmojs | sync/src/main/scala/sync.scala | Scala | apache-2.0 | 9,039 |
package scala
package reflect
package api
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
* `Universe` provides a complete set of reflection operations which make it possible for one
* to reflectively inspect Scala type relations, such as membership or subtyping.
*
* [[scala.reflect.api.Universe]] has two specialized sub-universes for different scenarios.
* [[scala.reflect.api.JavaUniverse]] adds operations that link symbols and types to the underlying
* classes and runtime values of a JVM instance-- this can be thought of as the `Universe` that
* should be used for all typical use-cases of Scala reflection. [[scala.reflect.macros.Universe]]
* adds operations which allow macros to access selected compiler data structures and operations--
* this type of `Universe` should only ever exist within the implementation of a Scala macro.
*
* `Universe` can be thought of as the entry point to Scala reflection. It mixes-in, and thus provides
* an interface to the following main types:
*
* - [[scala.reflect.api.Types#Type Types]] represent types
* - [[scala.reflect.api.Symbols#Symbol Symbols]] represent definitions
* - [[scala.reflect.api.Trees#Tree Trees]] represent abstract syntax trees
* - [[scala.reflect.api.Names#Name Names]] represent term and type names
* - [[scala.reflect.api.Annotations#Annotation Annotations]] represent annotations
* - [[scala.reflect.api.Positions#Position Positions]] represent source positions of tree nodes
* - [[scala.reflect.api.FlagSets#FlagSet FlagSet]] represent sets of flags that apply to symbols and
* definition trees
* - [[scala.reflect.api.Constants#Constant Constants]] represent compile-time constants.
*
* To obtain a `Universe` to use with Scala runtime reflection, simply make sure to use or import
* `scala.reflect.runtime.universe._`
* {{{
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
* scala> typeOf[List[Int]]
* res0: reflect.runtime.universe.Type = scala.List[Int]
*
* scala> typeOf[Either[String, Int]]
* res1: reflect.runtime.universe.Type = scala.Either[String,Int]
* }}}
*
 * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.blackbox.Context#universe]]
 * or [[scala.reflect.macros.whitebox.Context#universe]]. For example:
* {{{
* def printf(format: String, params: Any*): Unit = macro impl
* def impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = {
* import c.universe._
* ...
* }
* }}}
*
* For more information about `Universe`s, see the [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]]
*
* @groupprio Universe -1
* @group ReflectionAPI
*
* @contentDiagram hideNodes "*Api"
*/
abstract class Universe extends Symbols
with Types
with FlagSets
with Scopes
with Names
with Trees
with Constants
with Annotations
with Positions
with Exprs
with TypeTags
with ImplicitTags
with StandardDefinitions
with StandardNames
with StandardLiftables
with Mirrors
with Printers
with Liftables
with Quasiquotes
with Internals
{
/** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
*
* For example:
*
* {{{
* val five = reify{ 5 } // Literal(Constant(5))
* reify{ 5.toString } // Apply(Select(Literal(Constant(5)), TermName("toString")), List())
* reify{ five.splice.toString } // Apply(Select(five, TermName("toString")), List())
* }}}
*
* The produced tree is path dependent on the Universe `reify` was called from.
*
* Use [[scala.reflect.api.Exprs#Expr.splice]] to embed an existing expression into a `reify` call. Use [[Expr]] to turn a [[Tree]] into an expression that can be spliced.
* @group Universe
*/
// implementation is hardwired to `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
def reify[T](expr: T): Expr[T] = macro ???
}
| felixmulder/scala | src/reflect/scala/reflect/api/Universe.scala | Scala | bsd-3-clause | 4,567 |