code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M)
---|---|---|---|---|---|
/**
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.streaming.kafka
import java.io.Closeable
import java.util.{Properties, UUID}
import kafka.message.{DefaultCompressionCodec, NoCompressionCodec}
import kafka.producer._
import org.slf4j.LoggerFactory
case class KafkaProducer(topic: String,
brokerList: String,
clientId: String = UUID.randomUUID().toString,
synchronously: Boolean = true,
compress: Boolean = true,
batchSize: Integer = 200,
messageSendMaxRetries: Integer = 3,
requestRequiredAcks: Integer = -1
) extends Closeable {
val props = new Properties()
val log = LoggerFactory.getLogger(getClass)
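// Translate the constructor parameters into the old Scala producer's Properties-based config.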
val codec = if (compress) DefaultCompressionCodec.codec else NoCompressionCodec.codec
props.put("compression.codec", codec.toString)
props.put("producer.type", if (synchronously) "sync" else "async")
props.put("client.id", clientId)
props.put("batch.num.messages", batchSize.toString)
props.put("message.send.max.retries", messageSendMaxRetries.toString)
props.put("request.required.acks", requestRequiredAcks.toString)
props.put("serializer.class", "kafka.serializer.StringEncoder")
props.put("metadata.broker.list", brokerList)
val producer = new Producer[AnyRef, AnyRef](new ProducerConfig(props))
override def close(): Unit = {
producer.close()
}
def send(message: String, key: String): Unit = {
try {
log.info("Sending KeyedMessage[key, value]: [" + key + "," + message + "]")
producer.send(new KeyedMessage(topic, key, message))
} catch {
case e: Exception =>
log.error("Error sending KeyedMessage[key, value]: [" + key + "," + message + "]")
log.error("Exception: " + e.getMessage)
}
}
}
| mt0803/streaming-cep-engine | api/src/main/scala/com/stratio/streaming/api/kafka/KafkaProducer.scala | Scala | apache-2.0 | 2,104 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.integration
import java.time.temporal.ChronoUnit
import java.time.{LocalDateTime, ZoneOffset}
import algolia.AlgoliaDsl._
import algolia.AlgoliaTest
import algolia.inputs.{ABTest, ABTestVariant}
import algolia.`4XXAPIException`
import algolia.objects.{IgnorePlurals, Query}
import org.scalatest.RecoverMethods._
class ABTestIntegrationTest extends AlgoliaTest {
val now: LocalDateTime = LocalDateTime.now
val nowStr: String = now.atOffset(ZoneOffset.UTC).toEpochSecond.toString
val indexName1 = s"indexAbTest1-$nowStr"
val indexName2 = s"indexAbTest2-$nowStr"
before {
waitForMultipleIndexTasks(createIndices(indexName1, indexName2).taskID)
deleteAllABTests()
}
after {
waitForMultipleIndexTasks(clearIndices(indexName1, indexName2).taskID)
}
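// Test fixture: an A/B test with a 60/40 traffic split across the two scratch indices.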
def dummyABTest = ABTest(
name = s"abTestName-$nowStr",
variants = Seq(
ABTestVariant(indexName1, 60, Some("a description")),
ABTestVariant(indexName2, 40)
),
endAt = now.plus(6, ChronoUnit.DAYS)
)
def dummyAATest = ABTest(
name = s"aaTestName-$nowStr",
variants = Seq(
ABTestVariant(indexName1, 90),
ABTestVariant(
indexName1,
10,
customSearchParameters =
Some(Query(ignorePlurals = Some(IgnorePlurals.`true`)))
)
),
endAt = now.plus(1, ChronoUnit.DAYS)
)
describe("AB testing") {
it("should send an AB test") {
val inputAbTest = dummyABTest
taskShouldBeCreatedAndWaitForIt(
AlgoliaTest.client.execute(add abTest inputAbTest),
indexName1
)
val task = AlgoliaTest.client.execute(get all abTests)
whenReady(task) { abTests =>
abTests.abtests should have size 1
abTests.abtests.map { abTest =>
{
abTest.name should be(inputAbTest.name)
abTest.endAt should be(inputAbTest.endAt)
abTest.variants.size should be(inputAbTest.variants.size)
abTest.variants.forall { variant =>
val isVariantFound =
inputAbTest.variants
.map { expectedVariant =>
expectedVariant.index == variant.index &&
expectedVariant.description.getOrElse("") == variant.description &&
expectedVariant.trafficPercentage == variant.trafficPercentage
}
.reduce((a, b) => a || b)
isVariantFound
} should be(true)
}
}
}
}
it("should send an AA test") {
val inputAaTest = dummyAATest
taskShouldBeCreatedAndWaitForIt(
AlgoliaTest.client.execute(add abTest inputAaTest),
indexName1
)
val task = AlgoliaTest.client.execute(get all abTests)
whenReady(task) { abTests =>
abTests.abtests should have size 1
abTests.abtests.map { abTest =>
{
abTest.name should be(inputAaTest.name)
abTest.endAt should be(inputAaTest.endAt)
abTest.variants.size should be(inputAaTest.variants.size)
abTest.variants.forall { variant =>
val isVariantFound =
inputAaTest.variants
.map { expectedVariant =>
expectedVariant.index == variant.index &&
expectedVariant.trafficPercentage == variant.trafficPercentage &&
expectedVariant.customSearchParameters == variant.customSearchParameters
}
.reduce((a, b) => a || b)
isVariantFound
} should be(true)
}
}
}
}
it("should stop an AB test") {
val inputAbTest = dummyABTest
val addTask = AlgoliaTest.client.execute(add abTest inputAbTest)
val abTestID = whenReady(addTask) { res =>
res.abTestID
}
taskShouldBeCreatedAndWaitForIt(addTask, indexName1)
val stopTask = AlgoliaTest.client.execute(stop abTest abTestID)
taskShouldBeCreatedAndWaitForIt(stopTask, indexName1)
val getTask = AlgoliaTest.client.execute(get abTest abTestID)
whenReady(getTask) { abTest =>
abTest.abTestID should be(abTestID)
abTest.status should be("stopped")
}
}
it("should delete an AB test") {
val inputAbTest = dummyABTest
val addTask = AlgoliaTest.client.execute(add abTest inputAbTest)
val abTestID = whenReady(addTask) { res =>
res.abTestID
}
taskShouldBeCreatedAndWaitForIt(addTask, indexName1)
val deleteTask = AlgoliaTest.client.execute(delete abTest abTestID)
whenReady(deleteTask) { res =>
res
}
recoverToSucceededIf[`4XXAPIException`] {
AlgoliaTest.client.execute(get abTest abTestID)
}
}
}
}
| algolia/algoliasearch-client-scala | src/test/scala/algolia/integration/ABTestIntegrationTest.scala | Scala | mit | 5,939 |
package hulk.http.response
import java.io.{StringWriter, StringReader}
import akka.http.scaladsl.model.ContentTypes
import akka.util.ByteString
import com.github.mustachejava.DefaultMustacheFactory
import play.api.libs.json.{Json => PJson, JsValue}
import scala.collection.JavaConverters._
import scala.io.Codec
import scala.xml.Elem
/**
* Created by reweber on 24/12/2015
*/
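// Type class that turns a value into an HttpResponseBody for a target response format (Json, Xml, Text, Html, Binary).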
trait HttpResponseBodyWriter[A <: ResponseFormat] {
def apply(): HttpResponseBody
}
object HttpResponseBodyWriter {
implicit def jsonToHttpResponseBodyWriter(json: JsValue): HttpResponseBodyWriter[Json] = {
new HttpResponseBodyWriter[Json] {
override def apply(): HttpResponseBody = {
HttpResponseBody(ContentTypes.`application/json`, ByteString(PJson.stringify(json)))
}
}
}
implicit def xmlToHttpResponseBodyWriter(xmlElem: Elem): HttpResponseBodyWriter[Xml] = {
new HttpResponseBodyWriter[Xml] {
override def apply(): HttpResponseBody = {
HttpResponseBody(ContentTypes.`text/xml(UTF-8)`, ByteString(xmlElem.mkString))
}
}
}
implicit def stringToHttpResponseBodyWriter(text: String): HttpResponseBodyWriter[Text] = {
new HttpResponseBodyWriter[Text] {
override def apply(): HttpResponseBody = {
HttpResponseBody(ContentTypes.`text/plain(UTF-8)`, ByteString(text))
}
}
}
implicit def stringAsHtmlToHttpResponseBodyWriter(text: String): HttpResponseBodyWriter[Html] = {
new HttpResponseBodyWriter[Html] {
override def apply(): HttpResponseBody = {
HttpResponseBody(ContentTypes.`text/html(UTF-8)`, ByteString(text))
}
}
}
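// Renders a Mustache template (backed by a string or a file) to HTML; nested Seq/Map data is converted to Java collections for the Mustache engine.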
implicit def mustacheAsHtmlToHttpResponseBodyWriter(template: MustacheTemplate[_]): HttpResponseBodyWriter[Html] = {
new HttpResponseBodyWriter[Html] {
override def apply(): HttpResponseBody = {
val reader = template.template.fold(
stringTemplate => new StringReader(stringTemplate),
fileTemplate => fileTemplate.reader(Codec.UTF8)
)
val writer = new StringWriter()
val mf = new DefaultMustacheFactory()
val mustache = mf.compile(reader, "response")
mustache.execute(writer, dataJavaConversion(template.data))
writer.close()
HttpResponseBody(ContentTypes.`text/html(UTF-8)`, ByteString(writer.getBuffer.toString))
}
private def dataJavaConversion[A](data: A): Any = {
data match {
case data: Seq[_] => data.map(dataJavaConversion).asJava
case data: Map[_, _] => data.mapValues(dataJavaConversion).asJava
case d => d
}
}
}
}
implicit def byteArrayAsBinaryToHttpResponseBodyWriter(bytes: Array[Byte]): HttpResponseBodyWriter[Binary] = {
new HttpResponseBodyWriter[Binary] {
override def apply(): HttpResponseBody = {
HttpResponseBody(ContentTypes.`application/octet-stream`, ByteString(bytes))
}
}
}
} | reneweb/hulk | framework/src/main/scala/hulk/http/response/HttpResponseBodyWriter.scala | Scala | apache-2.0 | 2,944 |
package com.seanshubin.iteration.tracker.server
import com.seanshubin.http.values.domain.{Receiver, RequestValue, Route}
class DatabaseRoute(name: String, receiver: Receiver) extends Route(name, receiver) {
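// Only requests whose path starts with "/database/" are dispatched to this route's receiver.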
override def accept(request: RequestValue): Boolean = {
val shouldAccept = request.uri.path.startsWith("/database/")
shouldAccept
}
}
| SeanShubin/iteration-tracker | server/src/main/scala/com/seanshubin/iteration/tracker/server/DatabaseRoute.scala | Scala | unlicense | 355 |
package cassandra.cql
import org.joda.time.DateTime
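// A small AST for rendering Scala values as CQL literals; each node emits its textual form via `values`.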
trait CqlValue {
def values: String
}
case class CqlType(name: String, fields: (String, CqlValue)*) extends CqlValue {
override lazy val values = s"{${
fields
.map { case (k, v) => s"$k:${v.values}"}
.mkString(",")
}}"
}
case class CqlTuple(fields: CqlValue*) extends CqlValue {
override lazy val values = s"(${fields.map(_.values).mkString(",")})"
}
case class CqlTable(ofType: CqlType) extends CqlValue {
override lazy val values = s"(${
ofType.fields.map(_._2.values).mkString(",")
})"
}
case class CqlText(value: String) extends CqlValue {
override lazy val values = s"'$value'"
}
case class CqlDateTime(value: DateTime) extends CqlValue {
override lazy val values = s"'${value.toString}'"
}
case class CqlNumber[T: Numeric](value: T) extends CqlValue {
override lazy val values = s"$value"
}
abstract class CqlBoolean(value: Boolean) extends CqlValue {
override lazy val values = s"$value"
}
case object CqlTrue extends CqlBoolean(true)
case object CqlFalse extends CqlBoolean(false)
case object CqlNull extends CqlValue {
override lazy val values = "null"
}
case class CqlSet(xs: Set[CqlValue]) extends CqlValue {
override lazy val values = s"{${xs.map(_.values).mkString(",")}}"
}
case class CqlList(xs: Vector[CqlValue]) extends CqlValue {
override lazy val values = s"[${xs.map(_.values).mkString(",")}]"
}
object CqlList {
def apply(xs: CqlValue*) = new CqlList(xs.toVector)
}
object CqlSet {
def apply(xs: CqlValue*) = new CqlSet(xs.toSet)
} | fabianmurariu/cassandra-scala-nuveau-driver | cql/lib/src/main/scala/cassandra/cql/CqlValue.scala | Scala | apache-2.0 | 1,566 |
package cromwell.core.logging
import java.util.regex.Pattern
object LoggingTest {
def escapePattern(pattern: String) = Pattern.quote(pattern)
}
| ohsu-comp-bio/cromwell | core/src/test/scala/cromwell/core/logging/LoggingTest.scala | Scala | bsd-3-clause | 148 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.spark.{SparkConf, SparkContext}
// $example on$
import org.apache.spark.mllib.regression.{IsotonicRegression, IsotonicRegressionModel}
import org.apache.spark.mllib.util.MLUtils
// $example off$
object IsotonicRegressionExample {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("IsotonicRegressionExample")
val sc = new SparkContext(conf)
// $example on$
val data = MLUtils.loadLibSVMFile(sc,
"data/mllib/sample_isotonic_regression_libsvm_data.txt").cache()
// Create label, feature, weight tuples from input data with weight set to default value 1.0.
val parsedData = data.map { labeledPoint =>
(labeledPoint.label, labeledPoint.features(0), 1.0)
}
// Split data into training (60%) and test (40%) sets.
val splits = parsedData.randomSplit(Array(0.6, 0.4), seed = 11L)
val training = splits(0)
val test = splits(1)
// Create isotonic regression model from training data.
// Isotonic parameter defaults to true so it is only shown for demonstration
val model = new IsotonicRegression().setIsotonic(true).run(training)
// Create tuples of predicted and real labels.
val predictionAndLabel = test.map { point =>
val predictedLabel = model.predict(point._2)
(predictedLabel, point._1)
}
// Calculate mean squared error between predicted and real labels.
val meanSquaredError = predictionAndLabel.map { case (p, l) => math.pow((p - l), 2) }.mean()
println("Mean Squared Error = " + meanSquaredError)
// Save and load model
model.save(sc, "target/tmp/myIsotonicRegressionModel")
val sameModel = IsotonicRegressionModel.load(sc, "target/tmp/myIsotonicRegressionModel")
// $example off$
sc.stop()
}
}
// scalastyle:on println
| aokolnychyi/spark | examples/src/main/scala/org/apache/spark/examples/mllib/IsotonicRegressionExample.scala | Scala | apache-2.0 | 2,674 |
/*
* Copyright (c) 2013 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.pipes.collection.mutable
import scala.collection.mutable.Seq
import scala.collection.mutable.Map
/**
*
* @author Christos KK Loverdos <[email protected]>
*/
object PSeq {
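// Each member is a standalone function (e.g. Seq[A] => Seq[B]) so stages can be composed into pipelines.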
@inline final def filter[A](p: (A) ⇒ Boolean): Seq[A] ⇒ Seq[A] = _.filter(p)
@inline final def find[A](p: (A) ⇒ Boolean): Seq[A] ⇒ Option[A] = _.find(p)
@inline final def filterDefined[A]: Seq[Option[A]] ⇒ Seq[A] = _.withFilter(_.isDefined).map(_.get)
@inline final def map[A, B](f: (A) ⇒ B): Seq[A] ⇒ Seq[B] = _.map(f)
@inline final def map_1[A]: Seq[(A, _)] ⇒ Seq[A] = _.map(_._1)
@inline final def map_2[A]: Seq[(_, A)] ⇒ Seq[A] = _.map(_._2)
@inline final def foreach[A](f: (A) ⇒ Unit): Seq[A] ⇒ Unit = _.foreach(f)
@inline final def length[A]: Seq[A] ⇒ Int = _.length
@inline final def size[A]: Seq[A] ⇒ Int = _.length
@inline final def first[A]: Seq[A] ⇒ Option[A] = _.headOption
@inline final def partition[A](f: (A) ⇒ Boolean): Seq[A] ⇒ (Seq[A], Seq[A]) = _.partition(f)
@inline final def mkString[A](sep: String): Seq[A] ⇒ String = _.mkString(sep)
@inline final def mkString[A](start: String, sep: String, end: String): Seq[A] ⇒ String = _.mkString(start, sep, end)
// This is for debugging
@inline final def passThrough[A](f: (A) ⇒ Any): Seq[A] ⇒ Seq[A] = seq ⇒ {
seq.foreach(f)
seq
}
// ML-ish
@inline final def iter[A](f: (A) ⇒ Unit): Seq[A] ⇒ Unit = _.foreach(f)
@inline final def ofOne[A](x: A): Seq[A] = Seq(x)
@inline final def ofIterable[A]: Iterable[A] ⇒ Seq[A] = _.to[Seq]
@inline final def ofIterator[A]: Iterator[A] ⇒ Seq[A] = _.to[Seq]
@inline final def ofList[A]: List[A] ⇒ Seq[A] = _.to[Seq]
@inline final def ofArray[A]: Array[A] ⇒ Seq[A] = _.to[Seq]
@inline final def ofMap[A, B]: Map[A, B] ⇒ Seq[(A, B)] = _.to[Seq]
@inline final def ofMapSortedValuesBy[A, B, C](sortBy: (B) ⇒ C)(implicit ord: Ordering[C]): Map[A, B] ⇒ Seq[B] =
it ⇒ Seq(it.toSeq.sortBy(kv ⇒ sortBy(kv._2)).map(_._2):_*)
@inline final def ofMapFilteredValuesByKey[A, B](p: (A) ⇒ Boolean): Map[A, B] ⇒ Seq[B] =
it ⇒ Seq((for((k, v) ← it if p(k)) yield v).toSeq:_*)
@inline final def ofMapValues[A, B]: Map[A, B] ⇒ Seq[B] = it ⇒ Seq(it.values.toSeq:_*)
@inline final def ofJava[E]: java.util.Collection[E] ⇒ Seq[E] = it ⇒ {
import scala.collection.JavaConverters._
it.asScala.to[Seq]
}
@inline final def ofOption[A]: Option[A] ⇒ Seq[A] = {
case Some(value) ⇒ Seq(value)
case None ⇒ Seq()
}
}
| loverdos/scalapipes | src/main/scala/com/ckkloverdos/pipes/collection/mutable/PSeq.scala | Scala | apache-2.0 | 3,200 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.parser
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
* Test basic expression parsing. If a type of expression is supported it should be tested here.
*
* Please note that some of the expressions test don't have to be sound expressions, only their
* structure needs to be valid. Unsound expressions should be caught by the Analyzer or
* CheckAnalysis classes.
*/
class ExpressionParserSuite extends PlanTest {
import CatalystSqlParser._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
def assertEqual(sqlCommand: String, e: Expression): Unit = {
compareExpressions(parseExpression(sqlCommand), e)
}
def intercept(sqlCommand: String, messages: String*): Unit = {
val e = intercept[ParseException](parseExpression(sqlCommand))
messages.foreach { message =>
assert(e.message.contains(message))
}
}
test("star expressions") {
// Global Star
assertEqual("*", UnresolvedStar(None))
// Targeted Star
assertEqual("a.b.*", UnresolvedStar(Option(Seq("a", "b"))))
}
// NamedExpression (Alias/Multialias)
test("named expressions") {
// No Alias
val r0 = 'a
assertEqual("a", r0)
// Single Alias.
val r1 = 'a as "b"
assertEqual("a as b", r1)
assertEqual("a b", r1)
// Multi-Alias
assertEqual("a as (b, c)", MultiAlias('a, Seq("b", "c")))
assertEqual("a() (b, c)", MultiAlias('a.function(), Seq("b", "c")))
// Numeric literals without a space between the literal qualifier and the alias, should not be
// interpreted as such. An unresolved reference should be returned instead.
// TODO add the JIRA-ticket number.
assertEqual("1SL", Symbol("1SL"))
// Aliased star is allowed.
assertEqual("a.* b", UnresolvedStar(Option(Seq("a"))) as 'b)
}
test("binary logical expressions") {
// And
assertEqual("a and b", 'a && 'b)
// Or
assertEqual("a or b", 'a || 'b)
// Combination And/Or check precedence
assertEqual("a and b or c and d", ('a && 'b) || ('c && 'd))
assertEqual("a or b or c and d", 'a || 'b || ('c && 'd))
// Multiple AND/OR get converted into a balanced tree
assertEqual("a or b or c or d or e or f", (('a || 'b) || 'c) || (('d || 'e) || 'f))
assertEqual("a and b and c and d and e and f", (('a && 'b) && 'c) && (('d && 'e) && 'f))
}
test("long binary logical expressions") {
def testVeryBinaryExpression(op: String, clazz: Class[_]): Unit = {
val sql = (1 to 1000).map(x => s"$x == $x").mkString(op)
val e = parseExpression(sql)
assert(e.collect { case _: EqualTo => true }.size === 1000)
assert(e.collect { case x if clazz.isInstance(x) => true }.size === 999)
}
testVeryBinaryExpression(" AND ", classOf[And])
testVeryBinaryExpression(" OR ", classOf[Or])
}
test("not expressions") {
assertEqual("not a", !'a)
assertEqual("!a", !'a)
assertEqual("not true > true", Not(GreaterThan(true, true)))
}
test("exists expression") {
assertEqual(
"exists (select 1 from b where b.x = a.x)",
Exists(table("b").where(Symbol("b.x") === Symbol("a.x")).select(1)))
}
test("comparison expressions") {
assertEqual("a = b", 'a === 'b)
assertEqual("a == b", 'a === 'b)
assertEqual("a <=> b", 'a <=> 'b)
assertEqual("a <> b", 'a =!= 'b)
assertEqual("a != b", 'a =!= 'b)
assertEqual("a < b", 'a < 'b)
assertEqual("a <= b", 'a <= 'b)
assertEqual("a !> b", 'a <= 'b)
assertEqual("a > b", 'a > 'b)
assertEqual("a >= b", 'a >= 'b)
assertEqual("a !< b", 'a >= 'b)
}
test("between expressions") {
assertEqual("a between b and c", 'a >= 'b && 'a <= 'c)
assertEqual("a not between b and c", !('a >= 'b && 'a <= 'c))
}
test("in expressions") {
assertEqual("a in (b, c, d)", 'a in ('b, 'c, 'd))
assertEqual("a not in (b, c, d)", !('a in ('b, 'c, 'd)))
}
test("in sub-query") {
assertEqual(
"a in (select b from c)",
In('a, Seq(ListQuery(table("c").select('b)))))
}
test("like expressions") {
assertEqual("a like 'pattern%'", 'a like "pattern%")
assertEqual("a not like 'pattern%'", !('a like "pattern%"))
assertEqual("a rlike 'pattern%'", 'a rlike "pattern%")
assertEqual("a not rlike 'pattern%'", !('a rlike "pattern%"))
assertEqual("a regexp 'pattern%'", 'a rlike "pattern%")
assertEqual("a not regexp 'pattern%'", !('a rlike "pattern%"))
}
test("is null expressions") {
assertEqual("a is null", 'a.isNull)
assertEqual("a is not null", 'a.isNotNull)
assertEqual("a = b is null", ('a === 'b).isNull)
assertEqual("a = b is not null", ('a === 'b).isNotNull)
}
test("binary arithmetic expressions") {
// Simple operations
assertEqual("a * b", 'a * 'b)
assertEqual("a / b", 'a / 'b)
assertEqual("a DIV b", ('a / 'b).cast(LongType))
assertEqual("a % b", 'a % 'b)
assertEqual("a + b", 'a + 'b)
assertEqual("a - b", 'a - 'b)
assertEqual("a & b", 'a & 'b)
assertEqual("a ^ b", 'a ^ 'b)
assertEqual("a | b", 'a | 'b)
// Check precedences
assertEqual(
"a * t | b ^ c & d - e + f % g DIV h / i * k",
'a * 't | ('b ^ ('c & ('d - 'e + (('f % 'g / 'h).cast(LongType) / 'i * 'k)))))
}
test("unary arithmetic expressions") {
assertEqual("+a", 'a)
assertEqual("-a", -'a)
assertEqual("~a", ~'a)
assertEqual("-+~~a", -(~(~'a)))
}
test("cast expressions") {
// Note that DataType parsing is tested elsewhere.
assertEqual("cast(a as int)", 'a.cast(IntegerType))
assertEqual("cast(a as timestamp)", 'a.cast(TimestampType))
assertEqual("cast(a as array<int>)", 'a.cast(ArrayType(IntegerType)))
assertEqual("cast(cast(a as int) as long)", 'a.cast(IntegerType).cast(LongType))
}
test("function expressions") {
assertEqual("foo()", 'foo.function())
assertEqual("foo.bar()",
UnresolvedFunction(FunctionIdentifier("bar", Some("foo")), Seq.empty, isDistinct = false))
assertEqual("foo(*)", 'foo.function(star()))
assertEqual("count(*)", 'count.function(1))
assertEqual("foo(a, b)", 'foo.function('a, 'b))
assertEqual("foo(all a, b)", 'foo.function('a, 'b))
assertEqual("foo(distinct a, b)", 'foo.distinctFunction('a, 'b))
assertEqual("grouping(distinct a, b)", 'grouping.distinctFunction('a, 'b))
assertEqual("`select`(all a, b)", 'select.function('a, 'b))
assertEqual("foo(a as x, b as e)", 'foo.function('a as 'x, 'b as 'e))
}
test("window function expressions") {
val func = 'foo.function(star())
def windowed(
partitioning: Seq[Expression] = Seq.empty,
ordering: Seq[SortOrder] = Seq.empty,
frame: WindowFrame = UnspecifiedFrame): Expression = {
WindowExpression(func, WindowSpecDefinition(partitioning, ordering, frame))
}
// Basic window testing.
assertEqual("foo(*) over w1", UnresolvedWindowExpression(func, WindowSpecReference("w1")))
assertEqual("foo(*) over ()", windowed())
assertEqual("foo(*) over (partition by a, b)", windowed(Seq('a, 'b)))
assertEqual("foo(*) over (distribute by a, b)", windowed(Seq('a, 'b)))
assertEqual("foo(*) over (cluster by a, b)", windowed(Seq('a, 'b)))
assertEqual("foo(*) over (order by a desc, b asc)", windowed(Seq.empty, Seq('a.desc, 'b.asc )))
assertEqual("foo(*) over (sort by a desc, b asc)", windowed(Seq.empty, Seq('a.desc, 'b.asc )))
assertEqual("foo(*) over (partition by a, b order by c)", windowed(Seq('a, 'b), Seq('c.asc)))
assertEqual("foo(*) over (distribute by a, b sort by c)", windowed(Seq('a, 'b), Seq('c.asc)))
// Test use of expressions in window functions.
assertEqual(
"sum(product + 1) over (partition by ((product) + (1)) order by 2)",
WindowExpression('sum.function('product + 1),
WindowSpecDefinition(Seq('product + 1), Seq(Literal(2).asc), UnspecifiedFrame)))
assertEqual(
"sum(product + 1) over (partition by ((product / 2) + 1) order by 2)",
WindowExpression('sum.function('product + 1),
WindowSpecDefinition(Seq('product / 2 + 1), Seq(Literal(2).asc), UnspecifiedFrame)))
// Range/Row
val frameTypes = Seq(("rows", RowFrame), ("range", RangeFrame))
val boundaries = Seq(
("10 preceding", ValuePreceding(10), CurrentRow),
("3 + 1 following", ValueFollowing(4), CurrentRow), // Will fail during analysis
("unbounded preceding", UnboundedPreceding, CurrentRow),
("unbounded following", UnboundedFollowing, CurrentRow), // Will fail during analysis
("between unbounded preceding and current row", UnboundedPreceding, CurrentRow),
("between unbounded preceding and unbounded following",
UnboundedPreceding, UnboundedFollowing),
("between 10 preceding and current row", ValuePreceding(10), CurrentRow),
("between current row and 5 following", CurrentRow, ValueFollowing(5)),
("between 10 preceding and 5 following", ValuePreceding(10), ValueFollowing(5))
)
frameTypes.foreach {
case (frameTypeSql, frameType) =>
boundaries.foreach {
case (boundarySql, begin, end) =>
val query = s"foo(*) over (partition by a order by b $frameTypeSql $boundarySql)"
val expr = windowed(Seq('a), Seq('b.asc), SpecifiedWindowFrame(frameType, begin, end))
assertEqual(query, expr)
}
}
// We cannot use non integer constants.
intercept("foo(*) over (partition by a order by b rows 10.0 preceding)",
"Frame bound value must be a constant integer.")
// We cannot use an arbitrary expression.
intercept("foo(*) over (partition by a order by b rows exp(b) preceding)",
"Frame bound value must be a constant integer.")
}
test("row constructor") {
// Note that '(a)' will be interpreted as a nested expression.
assertEqual("(a, b)", CreateStruct(Seq('a, 'b)))
assertEqual("(a, b, c)", CreateStruct(Seq('a, 'b, 'c)))
assertEqual("(a as b, b as c)", CreateStruct(Seq('a as 'b, 'b as 'c)))
}
test("scalar sub-query") {
assertEqual(
"(select max(val) from tbl) > current",
ScalarSubquery(table("tbl").select('max.function('val))) > 'current)
assertEqual(
"a = (select b from s)",
'a === ScalarSubquery(table("s").select('b)))
}
test("case when") {
assertEqual("case a when 1 then b when 2 then c else d end",
CaseKeyWhen('a, Seq(1, 'b, 2, 'c, 'd)))
assertEqual("case (a or b) when true then c when false then d else e end",
CaseKeyWhen('a || 'b, Seq(true, 'c, false, 'd, 'e)))
assertEqual("case 'a'='a' when true then 1 end",
CaseKeyWhen("a" === "a", Seq(true, 1)))
assertEqual("case when a = 1 then b when a = 2 then c else d end",
CaseWhen(Seq(('a === 1, 'b.expr), ('a === 2, 'c.expr)), 'd))
assertEqual("case when (1) + case when a > b then c else d end then f else g end",
CaseWhen(Seq((Literal(1) + CaseWhen(Seq(('a > 'b, 'c.expr)), 'd.expr), 'f.expr)), 'g))
}
test("dereference") {
assertEqual("a.b", UnresolvedAttribute("a.b"))
assertEqual("`select`.b", UnresolvedAttribute("select.b"))
assertEqual("(a + b).b", ('a + 'b).getField("b")) // This will fail analysis.
assertEqual("struct(a, b).b", 'struct.function('a, 'b).getField("b"))
}
test("reference") {
// Regular
assertEqual("a", 'a)
// Starting with a digit.
assertEqual("1a", Symbol("1a"))
// Quoted using a keyword.
assertEqual("`select`", 'select)
// Unquoted using an unreserved keyword.
assertEqual("columns", 'columns)
}
test("subscript") {
assertEqual("a[b]", 'a.getItem('b))
assertEqual("a[1 + 1]", 'a.getItem(Literal(1) + 1))
assertEqual("`c`.a[b]", UnresolvedAttribute("c.a").getItem('b))
}
test("parenthesis") {
assertEqual("(a)", 'a)
assertEqual("r * (a + b)", 'r * ('a + 'b))
}
test("type constructors") {
// Dates.
assertEqual("dAte '2016-03-11'", Literal(Date.valueOf("2016-03-11")))
intercept("DAtE 'mar 11 2016'")
// Timestamps.
assertEqual("tImEstAmp '2016-03-11 20:54:00.000'",
Literal(Timestamp.valueOf("2016-03-11 20:54:00.000")))
intercept("timestamP '2016-33-11 20:54:00.000'")
// Binary.
assertEqual("X'A'", Literal(Array(0x0a).map(_.toByte)))
assertEqual("x'A10C'", Literal(Array(0xa1, 0x0c).map(_.toByte)))
intercept("x'A1OC'")
// Unsupported datatype.
intercept("GEO '(10,-6)'", "Literals of type 'GEO' are currently not supported.")
}
test("literals") {
def testDecimal(value: String): Unit = {
assertEqual(value, Literal(BigDecimal(value).underlying))
}
// NULL
assertEqual("null", Literal(null))
// Boolean
assertEqual("trUe", Literal(true))
assertEqual("False", Literal(false))
// Integral should have the narrowest possible type
assertEqual("787324", Literal(787324))
assertEqual("7873247234798249234", Literal(7873247234798249234L))
testDecimal("78732472347982492793712334")
// Decimal
testDecimal("7873247234798249279371.2334")
// Scientific Decimal
testDecimal("9.0e1")
testDecimal(".9e+2")
testDecimal("0.9e+2")
testDecimal("900e-1")
testDecimal("900.0E-1")
testDecimal("9.e+1")
intercept(".e3")
// Tiny Int Literal
assertEqual("10Y", Literal(10.toByte))
intercept("-1000Y", s"does not fit in range [${Byte.MinValue}, ${Byte.MaxValue}]")
// Small Int Literal
assertEqual("10S", Literal(10.toShort))
intercept("40000S", s"does not fit in range [${Short.MinValue}, ${Short.MaxValue}]")
// Long Int Literal
assertEqual("10L", Literal(10L))
intercept("78732472347982492793712334L",
s"does not fit in range [${Long.MinValue}, ${Long.MaxValue}]")
// Double Literal
assertEqual("10.0D", Literal(10.0D))
intercept("-1.8E308D", s"does not fit in range")
intercept("1.8E308D", s"does not fit in range")
// BigDecimal Literal
assertEqual("90912830918230182310293801923652346786BD",
Literal(BigDecimal("90912830918230182310293801923652346786").underlying()))
assertEqual("123.0E-28BD", Literal(BigDecimal("123.0E-28").underlying()))
assertEqual("123.08BD", Literal(BigDecimal("123.08").underlying()))
intercept("1.20E-38BD", "DecimalType can only support precision up to 38")
}
test("strings") {
// Single Strings.
assertEqual("\"hello\"", "hello")
assertEqual("'hello'", "hello")
// Multi-Strings.
assertEqual("\"hello\" 'world'", "helloworld")
assertEqual("'hello' \" \" 'world'", "hello world")
// 'LIKE' string literals. Notice that an escaped '%' is the same as an escaped '\' and a
// regular '%'; to get the correct result you need to add another escaped '\'.
// TODO figure out if we shouldn't change the ParseUtils.unescapeSQLString method?
assertEqual("'pattern%'", "pattern%")
assertEqual("'no-pattern\\%'", "no-pattern\\%")
assertEqual("'pattern\\\\%'", "pattern\\%")
assertEqual("'pattern\\\\\\%'", "pattern\\\\%")
// Escaped characters.
// See: http://dev.mysql.com/doc/refman/5.7/en/string-literals.html
assertEqual("'\\0'", "\u0000") // ASCII NUL (X'00')
assertEqual("'\\''", "\'") // Single quote
assertEqual("'\\\"'", "\"") // Double quote
assertEqual("'\\b'", "\b") // Backspace
assertEqual("'\\n'", "\n") // Newline
assertEqual("'\\r'", "\r") // Carriage return
assertEqual("'\\t'", "\t") // Tab character
assertEqual("'\\Z'", "\u001A") // ASCII 26 - CTRL + Z (EOF on windows)
// Octals
assertEqual("'\\110\\145\\154\\154\\157\\041'", "Hello!")
// Unicode
assertEqual("'\\u0057\\u006F\\u0072\\u006C\\u0064\\u0020\\u003A\\u0029'", "World :)")
}
test("intervals") {
def intervalLiteral(u: String, s: String): Literal = {
Literal(CalendarInterval.fromSingleUnitString(u, s))
}
// Empty interval statement
intercept("interval", "at least one time unit should be given for interval literal")
// Single Intervals.
val units = Seq(
"year",
"month",
"week",
"day",
"hour",
"minute",
"second",
"millisecond",
"microsecond")
val forms = Seq("", "s")
val values = Seq("0", "10", "-7", "21")
units.foreach { unit =>
forms.foreach { form =>
values.foreach { value =>
val expected = intervalLiteral(unit, value)
assertEqual(s"interval $value $unit$form", expected)
assertEqual(s"interval '$value' $unit$form", expected)
}
}
}
// Hive nanosecond notation.
assertEqual("interval 13.123456789 seconds", intervalLiteral("second", "13.123456789"))
assertEqual("interval -13.123456789 second", intervalLiteral("second", "-13.123456789"))
// Non Existing unit
intercept("interval 10 nanoseconds", "No interval can be constructed")
// Year-Month intervals.
val yearMonthValues = Seq("123-10", "496-0", "-2-3", "-123-0")
yearMonthValues.foreach { value =>
val result = Literal(CalendarInterval.fromYearMonthString(value))
assertEqual(s"interval '$value' year to month", result)
}
// Day-Time intervals.
val dayTimeValues = Seq(
"99 11:22:33.123456789",
"-99 11:22:33.123456789",
"10 9:8:7.123456789",
"1 0:0:0",
"-1 0:0:0",
"1 0:0:1")
dayTimeValues.foreach { value =>
val result = Literal(CalendarInterval.fromDayTimeString(value))
assertEqual(s"interval '$value' day to second", result)
}
// Unknown FROM TO intervals
intercept("interval 10 month to second", "Intervals FROM month TO second are not supported.")
// Composed intervals.
assertEqual(
"interval 3 months 22 seconds 1 millisecond",
Literal(new CalendarInterval(3, 22001000L)))
assertEqual(
"interval 3 years '-1-10' year to month 3 weeks '1 0:0:2' day to second",
Literal(new CalendarInterval(14,
22 * CalendarInterval.MICROS_PER_DAY + 2 * CalendarInterval.MICROS_PER_SECOND)))
}
test("composed expressions") {
assertEqual("1 + r.r As q", (Literal(1) + UnresolvedAttribute("r.r")).as("q"))
assertEqual("1 - f('o', o(bar))", Literal(1) - 'f.function("o", 'o.function('bar)))
intercept("1 - f('o', o(bar)) hello * world", "mismatched input '*'")
}
test("current date/timestamp braceless expressions") {
assertEqual("current_date", CurrentDate())
assertEqual("current_timestamp", CurrentTimestamp())
}
test("SPARK-17364, fully qualified column name which starts with number") {
assertEqual("123_", UnresolvedAttribute("123_"))
assertEqual("1a.123_", UnresolvedAttribute("1a.123_"))
// ".123" should not be treated as token of type DECIMAL_VALUE
assertEqual("a.123A", UnresolvedAttribute("a.123A"))
// ".123E3" should not be treated as token of type SCIENTIFIC_DECIMAL_VALUE
assertEqual("a.123E3_column", UnresolvedAttribute("a.123E3_column"))
// ".123D" should not be treated as token of type DOUBLE_LITERAL
assertEqual("a.123D_column", UnresolvedAttribute("a.123D_column"))
// ".123BD" should not be treated as token of type BIGDECIMAL_LITERAL
assertEqual("a.123BD_column", UnresolvedAttribute("a.123BD_column"))
}
test("SPARK-17832 function identifier contains backtick") {
val complexName = FunctionIdentifier("`ba`r", Some("`fo`o"))
assertEqual(complexName.quotedString, UnresolvedAttribute("`fo`o.`ba`r"))
intercept(complexName.unquotedString, "mismatched input")
// Function identifiers containing consecutive backticks should be treated correctly.
val complexName2 = FunctionIdentifier("ba``r", Some("fo``o"))
assertEqual(complexName2.quotedString, UnresolvedAttribute("fo``o.ba``r"))
}
}
| sachintyagi22/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala | Scala | apache-2.0 | 20,964 |
// Copyright (C) 2015 ENSIME Authors
// License: GPL 3.0
package org.ensime.util
import Predef.{ any2stringadd => _ }
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.IOException
import org.scalatest._
class IoSpec extends FlatSpec with Matchers {
import io._
val bytes = Array[Byte](0, 1, -2, 127, -128)
"io._" should "convert to a byte array" in {
val in = new ByteArrayInputStream(bytes)
in.toByteArray() shouldEqual bytes
}
it should "drain an output stream to an input stream" in {
val in = new ByteArrayInputStream(bytes)
val out = new ByteArrayOutputStream()
out.drain(in)
out.toByteArray() shouldEqual bytes
// no way to confirm that the streams are closed, thanks for that J2SE
}
}
| eddsteel/ensime | util/src/test/scala/org/ensime/util/IoSpec.scala | Scala | gpl-3.0 | 779 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author Matthew Saltz, John Miller
* @version 1.3
* @date Fri Jul 10 12:39:33 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.graphalytics
package mutable
import java.io.PrintWriter
import scala.collection.mutable.Map
import scala.collection.mutable.{Set => SET}
import scala.io.Source.fromFile
import scala.reflect.ClassTag
import scalation.util.Error
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MGraphIO` class is used to write multi-digraphs to a file.
* @param g the multi-digraph to write
*/
class MGraphIO [TLabel: ClassTag] (g: MGraph [TLabel])
extends Error
{
private val DEBUG = true // debug flag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write multi-digraph 'g' to a file in the following format:
* <p>
* MGraph (<name>, <inverse>, <nVertices>
* <vertexId> <label> <chVertex0> <chVertex1> ...
* ...
* )
* <p>
* @param name the file-name containing the graph's vertex, edge and label information
* @param base the base sub-directory for storing graphs
* @param ext the standard file extension for graph
*/
def write (name: String = g.name, base: String = BASE_DIR, ext: String = EXT)
{
val gFile = base + name + ext // relative path-name for file
val pw = new PrintWriter (gFile)
if (DEBUG) println (s"write: gFile = $gFile")
pw.println (s"MGraph (${g.name}, ${g.inverse}, ${g.size}")
for (i <- g.ch.indices) pw.println (g.toLine (i))
for ((k, v) <- g.elabel) pw.println (s"$k -> $v")
pw.println (")")
pw.close ()
} // write
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write the graph to TWO 'igraph' compatible files.
* @see igraph.sourceforge.net
*/
def write2IgraphFiles (prefix: String): (String, String) =
{
val lFile = prefix + "igl.txt"
val eFile = prefix + "ige.txt"
val lOut = new PrintWriter (lFile)
g.label.foreach (lOut.println (_))
lOut.close
val eOut = new PrintWriter (eFile)
for (i <- g.ch.indices) g.ch(i).foreach (x => eOut.println (i + " " + x))
eOut.close
(lFile, eFile)
} // write2IgraphFiles
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write the graph to TWO 'Neo4J' compatible files: 'lFile' and 'eFile' so that
* they may be fed into 'Neo4j' with one of its utilities.
* FIX: need to handle multiple edge types.
* @param lFile the file containing the graph labels (line: vertex-id TAB label)
* @param eFile the file the edges (line: start-id TAB end-id TAB type)
*/
def write2Neo4JFiles (lFile: String, eFile: String)
{
val vertexLine = new PrintWriter (lFile) // write the vertex ids and their labels
vertexLine.println ("id\tlabel")
g.label.foldLeft (1) { (i, l) => vertexLine.println (i + "\t" + l); i + 1 }
vertexLine.close
val edgeLine = new PrintWriter (eFile) // write the edges and their types.
edgeLine.println ("start\tend\ttype")
g.ch.foldLeft (1) { (i, v) =>
v.foreach { c => edgeLine.println (i + "\t" + (c+1) + "\tEDGE") }
i + 1
} // foldLeft
edgeLine.close
} // write2Neo4JFiles
} // MGraphIO class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MGraphIO` object is the companion object to the `MGraphIO` class and
* is used for reading graphs from files or graph databases.
*/
object MGraphIO
extends Error
{
/** The standard file extension for digraphs
*/
val EXT = ".mdg"
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Make a set of child vertices '(v_0, v_1, ...)' from a string array.
* @param strArr the string array
*/
def makeSet (strArr: Array [String]): SET [Int] =
{
if (strArr(0) == "") SET [Int] () else SET (strArr.map (_.toInt): _*)
} // makeSet
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Make an edge tuple '(u, v)' from a string array.
* @param strArr the string array
*/
def makeTuple (strArr: Array [String]): Tuple2 [Int, Int] =
{
(strArr(0).replace ("(", "").toInt, strArr(1).replace (")", "").toInt)
} // makeTuple
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert a string into a label according to the type `TLabel`.
* @param s the string to convert
*/
def toLabel [TLabel] (s: String): TLabel =
{
val t: TLabel = null.asInstanceOf [TLabel]
t match {
case _: Int => s.toInt.asInstanceOf [TLabel]
case _: Double => s.toDouble.asInstanceOf [TLabel]
case _: String => s.asInstanceOf [TLabel]
// case _: VectorD => VectorD (s.split (","))
case _ => { flaw ("toLabel", "label type not supported"); null.asInstanceOf [TLabel] }
} // match
} // toLabel
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a multi-digraph from a file based on the format used by 'print' and 'write':
* <p>
* MGraph (<name>, <inverse>, <nVertices>
* <vertexId>, <label>, <chVertex0>, <chVertex1> ...
* ...
* )
* <p>
* @param name the file-name containing the graph's vertex, edge and label information
* @param base the base sub-directory for storing graphs
* @param ext the standard file extension for graph
* @param sep the character separating the values (e.g., ',', ' ', '\t')
*/
def apply [TLabel: ClassTag] (name: String, base: String = BASE_DIR, ext: String = EXT, sep: Char = ','): MGraph [TLabel] =
{
val gFile = base + name + ext // relative path-name for file
val l = fromFile (gFile).getLines.toArray // get the lines from gFile
val l0 = l(0).split ('(')(1).split (sep).map (_.trim) // array for line 0
val n = l0(2).toInt // number of vertices
val ch = Array.ofDim [SET [Int]] (n) // adjacency: array of children (ch)
val label = Array.ofDim [TLabel] (n) // array of vertex labels
val elabel = Map [Tuple2 [Int, Int], TLabel] () // map of edge labels
println (s"apply: read $n vertices from $gFile")
for (i <- ch.indices) {
val li = l(i+1).split (sep).map (_.trim) // line i (>0) splits into i, label, ch
label(i) = toLabel (li(1)) // make vertex label
ch(i) = makeSet (li.slice (2, li.length) ) // make ch set
} // for
for (i <- n+1 until l.length-1) {
val li = l(i).split ("->").map (_.trim) // line i (>n) splits into (u, v) -> elabel
elabel += makeTuple (li(0).split (sep)) -> toLabel (li(1))
} // for
new MGraph [TLabel] (ch, label, elabel, l0(1) == "true", l0(0))
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a graph from TWO files:
* 'lFile' is a file with one label per line, where each line represents
* the vertex with id <lineNumber>.
* 'eFile' is a file with each line representing the vertex with id
* <lineNumber>, and each line contains a space-separated list of vertices
* to which the current vertex is adjacent.
* @param lFile the file containing the graph labels
* @param eFile the file the edges (to create adjacency sets)
* @param inverse whether to store inverse adjacency sets (parents)
*/
def read2Files [TLabel: ClassTag] (lFile: String, eFile: String, inverse: Boolean = false): MGraph [TLabel] =
{
val lLines = fromFile (lFile).getLines // get the lines from lFile
val label = lLines.map (x => toLabel (x.trim)).toArray // make the label array
val eLines = fromFile (eFile).getLines // get the lines from eFile
val ch = eLines.map ( line => // make the adj array
if (line.trim != "") SET (line.split (" ").map (_.trim.toInt): _*)
else SET [Int] ()
).toArray
new MGraph [TLabel] (ch, label, null) // FIX: elabels?
} // read2Files
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a graph from TWO specially formatted Pajek files.
* @param lFile the file containing the graph labels
* @param eFile the file the edges (to create adjacency sets)
* @param inverse whether to store inverse adjacency sets (parents)
*/
def read2PajekFile [TLabel: ClassTag] (lFile: String, eFile: String, inverse: Boolean = false): MGraph [TLabel] =
{
val lLines = fromFile (lFile).getLines // get the lines from lFile
val label = lLines.map (x => toLabel (x.trim)).toArray
val ch = Array.ofDim [SET [Int]] (label.size)
for (i <- ch.indices) ch(i) = SET [Int] ()
val eLines = fromFile (eFile).getLines // get the lines from eFile
for (line <- eLines) {
val splitL = line.split (" ").map (_.trim)
val adjs = splitL.slice (1, splitL.length).map(_.trim.toInt).toSet
ch(splitL(0).toInt-1) ++= adjs
} // for
new MGraph [TLabel] (ch, label, null) // FIX: elabels?
} // read2PajekFile
} // MGraphIO object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MGraphIOTest` object is used to test the `MGraphIO` class and object.
* > run-main scalation.graphalytics.mutable.MGraphIOTest
*/
object MGraphIOTest extends App
{
val mgGen = new MGraphGen [Int]
val name = "ran_graph" // the name of the graph
val size = 50 // size of the graph
val nLabels = 10 // number of distinct vertex labels
val eLabels = 5 // number of distinct edge labels
val avDegree = 3 // average vertex out degree for the graph
val inverse = false
// Create a random graph and print it out
val ran_graph = mgGen.genRandomGraph (size, nLabels, eLabels, avDegree, inverse, "ran_graph")
println (s"ran_graph = $ran_graph")
ran_graph.printG (false)
ran_graph.printG ()
// Write the graph to a file
val mgIO = new MGraphIO (ran_graph)
println ("start writing graph to " + name)
mgIO.write ()
println ("end writing graph to " + name)
// Read the file to create a new identical graph
val g = MGraphIO (name)
println (s"g = $g")
g.printG ()
} // MGraphIOTest object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/graphalytics/mutable/MGraphIO.scala | Scala | mit | 11,333 |
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.framework.service
import javax.inject.{Inject, Singleton}
import org.maproulette.data.{UserType}
import org.maproulette.exception.{NotFoundException, InvalidException}
import org.maproulette.framework.model.{User, Follower, Group, MemberObject}
import org.maproulette.permissions.Permission
import org.maproulette.provider.websockets.{WebSocketMessages, WebSocketProvider}
/**
* Service for handling following and followers for users
*
* @author nrotstan
*/
@Singleton
class FollowService @Inject() (
serviceManager: ServiceManager,
webSocketProvider: WebSocketProvider,
permission: Permission
) {
/**
* Follow a user, updating the follower's "following" group and the
* followed user's "followers" group
*
* @param follower The user who is to be the follower
* @param followed The user to be followed
* @param user The user making the request
*/
def follow(follower: User, followed: User, user: User): Unit = {
this.permission.hasWriteAccess(UserType(), user)(follower.id)
if (!followed.settings.allowFollowing.getOrElse(true)) {
throw new InvalidException("User cannot be followed")
}
this.serviceManager.group.addGroupMember(
this.getFollowingGroup(follower, user),
MemberObject.user(followed.id)
)
this.serviceManager.group.addGroupMember(
this.getFollowersGroup(followed, user),
MemberObject.user(follower.id)
)
webSocketProvider.sendMessage(
WebSocketMessages.followUpdate(
WebSocketMessages.FollowUpdateData(Some(follower.id), Some(followed.id))
)
)
this.serviceManager.notification.createFollowedNotification(follower, followed.id)
}
/**
* Stop following a user, updating the follower's "following" group and the
* followed user's "followers" group
*
* @param follower The user who is following
* @param followed The user being followed
* @param user The user making the request
*/
def stopFollowing(follower: User, followed: User, user: User): Unit = {
this.permission.hasWriteAccess(UserType(), user)(followed.id)
this.serviceManager.group.removeGroupMember(
this.getFollowingGroup(follower, user),
MemberObject.user(followed.id)
)
this.serviceManager.group.removeGroupMember(
this.getFollowersGroup(followed, user),
MemberObject.user(follower.id)
)
webSocketProvider.sendMessage(
WebSocketMessages.followUpdate(
WebSocketMessages.FollowUpdateData(Some(follower.id), Some(followed.id))
)
)
}
/**
* Clear all of a user's followers
*
* @param followed The user for which followers are to be cleared
* @param user The user making the request
*/
def clearFollowers(followed: User, user: User): Unit = {
this.permission.hasWriteAccess(UserType(), user)(followed.id)
this.serviceManager.group.clearGroupMembers(this.getFollowersGroup(followed, user))
this.serviceManager.group.removeGroupMemberAcrossGroupType(
MemberObject.user(followed.id),
Group.GROUP_TYPE_FOLLOWING
)
webSocketProvider.sendMessage(
WebSocketMessages.followUpdate(
WebSocketMessages.FollowUpdateData(None, Some(followed.id))
)
)
}
/**
* Block a follower from following a user, updating their status in the "followers"
* group to BLOCKED
*
* @param followerId The id of the user to be blocked
* @param followed The user being followed
* @param user The user making the request
*/
def blockFollower(followerId: Long, followed: User, user: User): Unit = {
this.permission.hasWriteAccess(UserType(), user)(followed.id)
this.serviceManager.group.updateGroupMemberStatus(
this.getFollowersGroup(followed, user),
MemberObject.user(followerId),
Follower.STATUS_BLOCKED
)
webSocketProvider.sendMessage(
WebSocketMessages.followUpdate(
WebSocketMessages.FollowUpdateData(Some(followerId), Some(followed.id))
)
)
}
/**
* Unblock a follower, allowing them to follow a user again
*
* @param followerId The id of the user to be unblocked
* @param followed The user being followed
* @param user The user making the request
*/
def unblockFollower(followerId: Long, followed: User, user: User): Unit = {
this.permission.hasWriteAccess(UserType(), user)(followed.id)
this.serviceManager.group.updateGroupMemberStatus(
this.getFollowersGroup(followed, user),
MemberObject.user(followerId),
Follower.STATUS_FOLLOWING
)
webSocketProvider.sendMessage(
WebSocketMessages.followUpdate(
WebSocketMessages.FollowUpdateData(Some(followerId), Some(followed.id))
)
)
}
/**
* Retrieve list of Users being followed by a user
*
* @param followerId The id of the user who follows the desired users
* @param user The user making the request
*/
def getUsersFollowedBy(followerId: Long, user: User): List[User] =
this.getGroupUsers(
this.getFollowingGroup(
retrieveUserOrError(followerId),
user
)
)
/**
* Retrieve list of Followers who are following a user
*
* @param followedId The id of the user being followed by the desired users
* @param user The user making the request
*/
def getUserFollowers(followedId: Long, user: User): List[Follower] = {
val userMembers = this.serviceManager.group.membersOfType(
this.getFollowersGroup(retrieveUserOrError(followedId), user),
UserType()
)
val users = this.serviceManager.user.retrieveListById(userMembers.map(_.memberId))
userMembers
.map(member => {
users.find(u => u.id == member.memberId) match {
case Some(user) => Some(Follower(member.id, user, member.status))
case None => None
}
})
.flatten
}
/**
* Retrieves the Following group for a User, creating it first if it doesn't
* exist
*
* @param follower The user whose following group is desired
* @param user The user making the request
*/
def getFollowingGroup(follower: User, user: User): Group = {
this.serviceManager.group
.retrieve(
follower.followingGroupId match {
case Some(groupId) => groupId
case None =>
this.serviceManager.user.addFollowingGroup(follower, user).get
}
)
.get
}
/**
* Retrieves the Followers group for a User, creating it first if it doesn't
* exist
*
* @param followed The user whose followers group is desired
* @param user The user making the request
*/
def getFollowersGroup(followed: User, user: User): Group = {
this.serviceManager.group
.retrieve(
followed.followersGroupId match {
case Some(groupId) => groupId
case None =>
this.serviceManager.user.addFollowersGroup(followed, user).get
}
)
.get
}
/**
* Retrieves a User or throws a NotFoundException if the user doesn't exist
*
* @param userId The identifier of the user to retrieve
* @return The matching User; a NotFoundException is thrown if no such user exists
*/
private def retrieveUserOrError(userId: Long): User =
this.serviceManager.user.retrieve(userId) match {
case Some(u) => u
case None => throw new NotFoundException(s"No user with id ${userId} found")
}
/**
* Retrieves User representations of user members of a group
*/
private def getGroupUsers(group: Group): List[User] =
this.serviceManager.user.retrieveListById(
this.serviceManager.group.membersOfType(group, UserType()).map(_.memberId)
)
}
| mgcuthbert/maproulette2 | app/org/maproulette/framework/service/FollowService.scala | Scala | apache-2.0 | 7,828 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.classification.LogisticRegressionSuite._
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.Row
class MultilayerPerceptronClassifierSuite extends SparkFunSuite with MLlibTestSparkContext {
test("XOR function learning as binary classification problem with two outputs.") {
val dataFrame = sqlContext.createDataFrame(Seq(
(Vectors.dense(0.0, 0.0), 0.0),
(Vectors.dense(0.0, 1.0), 1.0),
(Vectors.dense(1.0, 0.0), 1.0),
(Vectors.dense(1.0, 1.0), 0.0))
).toDF("features", "label")
val layers = Array[Int](2, 5, 2)
val trainer = new MultilayerPerceptronClassifier()
.setLayers(layers)
.setBlockSize(1)
.setSeed(11L)
.setMaxIter(100)
val model = trainer.fit(dataFrame)
val result = model.transform(dataFrame)
val predictionAndLabels = result.select("prediction", "label").collect()
predictionAndLabels.foreach { case Row(p: Double, l: Double) =>
assert(p == l)
}
}
// TODO: implement a more rigorous test
test("3 class classification with 2 hidden layers") {
val nPoints = 1000
// The following weights are taken from OneVsRestSuite.scala
// they represent the 3-class iris dataset
val weights = Array(
-0.57997, 0.912083, -0.371077, -0.819866, 2.688191,
-0.16624, -0.84355, -0.048509, -0.301789, 4.170682)
val xMean = Array(5.843, 3.057, 3.758, 1.199)
val xVariance = Array(0.6856, 0.1899, 3.116, 0.581)
val rdd = sc.parallelize(generateMultinomialLogisticInput(
weights, xMean, xVariance, true, nPoints, 42), 2)
val dataFrame = sqlContext.createDataFrame(rdd).toDF("label", "features")
val numClasses = 3
val numIterations = 100
val layers = Array[Int](4, 5, 4, numClasses)
val trainer = new MultilayerPerceptronClassifier()
.setLayers(layers)
.setBlockSize(1)
.setSeed(11L)
.setMaxIter(numIterations)
val model = trainer.fit(dataFrame)
val mlpPredictionAndLabels = model.transform(dataFrame).select("prediction", "label")
.map { case Row(p: Double, l: Double) => (p, l) }
// train multinomial logistic regression
val lr = new LogisticRegressionWithLBFGS()
.setIntercept(true)
.setNumClasses(numClasses)
lr.optimizer.setRegParam(0.0)
.setNumIterations(numIterations)
val lrModel = lr.run(rdd)
val lrPredictionAndLabels = lrModel.predict(rdd.map(_.features)).zip(rdd.map(_.label))
// MLP's predictions should not differ a lot from LR's.
val lrMetrics = new MulticlassMetrics(lrPredictionAndLabels)
val mlpMetrics = new MulticlassMetrics(mlpPredictionAndLabels)
assert(mlpMetrics.confusionMatrix ~== lrMetrics.confusionMatrix absTol 100)
}
}
| practice-vishnoi/dev-spark-1 | mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala | Scala | apache-2.0 | 3,877 |
object ORSet {
def empty[A]: ORSet[A] = ???
}
final class ORSet[A] {
def add(node: Long, element: A): ORSet[A] = ???
def add(node: Int, element: A): ORSet[A] = ???
}
class Test {
ORSet.empty.add(42, "A")
}
| scala/scala | test/files/pos/t11511.scala | Scala | apache-2.0 | 216 |
package org.decaf.distributed.server
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.StatusCodes
import akka.stream.ActorMaterializer
trait HttpServer {
def routes: Route
def startHttpServer(): Unit = {
implicit val system = ServerActorSystem
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
def fullRoute = routes ~ complete(StatusCodes.NotFound)
val bindingFuture = Http().bindAndHandle(fullRoute, "0.0.0.0", 8080)
println(s"Server online at http://0.0.0.0:8080")
sys.addShutdownHook {
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
}
}
}
| adamdecaf/distributed | server/src/main/scala/HttpServer.scala | Scala | apache-2.0 | 828 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package index
import _root_.org.jetbrains.plugins.scala.lang.psi.impl.search.ScSourceFilterScope
import com.intellij.psi.stubs.StringStubIndexExtension
import com.intellij.openapi.project.Project
import com.intellij.psi.search.GlobalSearchScope
import api.toplevel.templates.ScExtendsBlock
import api.base.types.ScSelfTypeElement
/**
* User: Alexander Podkhalyuzin
* Date: 24.10.2008
*/
class ScDirectInheritorsIndex extends StringStubIndexExtension[ScExtendsBlock] {
override def get(int: String, project: Project, scope: GlobalSearchScope): java.util.Collection[ScExtendsBlock] =
super.get(int, project, new ScSourceFilterScope(scope, project))
def getKey = ScDirectInheritorsIndex.KEY
}
object ScDirectInheritorsIndex {
val KEY = ScalaIndexKeys.SUPER_CLASS_NAME_KEY
}
class ScSelfTypeInheritorsIndex extends StringStubIndexExtension[ScSelfTypeElement] {
override def get(int: String, project: Project, scope: GlobalSearchScope): java.util.Collection[ScSelfTypeElement] =
super.get(int, project, new ScSourceFilterScope(scope, project))
def getKey = ScSelfTypeInheritorsIndex.KEY
}
object ScSelfTypeInheritorsIndex {
val KEY = ScalaIndexKeys.SELF_TYPE_CLASS_NAME_KEY
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/index/ScDirectInheritorsIndex.scala | Scala | apache-2.0 | 1,276 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package journal
import org.slf4j.{Logger => Backend, LoggerFactory}
import scala.reflect.macros.Context
import java.util.concurrent.{ThreadFactory, Executors}
sealed class Logger(val backend: Backend, val handler: LogMessage => Unit) {
def error(message: String): Unit =
macro LoggerMacro.errorMessage
def error(message: String, cause: Throwable): Unit =
macro LoggerMacro.errorMessageCause
def warn(message: String): Unit =
macro LoggerMacro.warnMessage
def warn(message: String, cause: Throwable): Unit =
macro LoggerMacro.warnMessageCause
def info(message: String): Unit =
macro LoggerMacro.infoMessage
def info(message: String, cause: Throwable): Unit =
macro LoggerMacro.infoMessageCause
def debug(message: String): Unit =
macro LoggerMacro.debugMessage
def debug(message: String, cause: Throwable): Unit =
macro LoggerMacro.debugMessageCause
}
/**
* Borrowed generously and adapted from scala-logging (https://github.com/typesafehub/scala-logging)
* which is under the Apache 2.0 license.
*
* Thanks to Heiko Seeberger et al for creating Scala Logging.
*/
private object LoggerMacro {
type LoggerContext = Context { type PrefixType = Logger }
def errorMessage(c: LoggerContext)(message: c.Expr[String]) =
c.universe.reify {
if (c.prefix.splice.backend.isErrorEnabled)
c.prefix.splice.handler(Error(message.splice))
}
def errorMessageCause(c: LoggerContext)(message: c.Expr[String], cause: c.Expr[Throwable]) =
c.universe.reify {
if (c.prefix.splice.backend.isErrorEnabled)
c.prefix.splice.handler(Error(message.splice, Some(cause.splice)))
}
def warnMessage(c: LoggerContext)(message: c.Expr[String]) =
c.universe.reify {
if (c.prefix.splice.backend.isWarnEnabled)
c.prefix.splice.handler(Warn(message.splice))
}
def warnMessageCause(c: LoggerContext)(message: c.Expr[String], cause: c.Expr[Throwable]) =
c.universe.reify {
if (c.prefix.splice.backend.isWarnEnabled)
c.prefix.splice.handler(Warn(message.splice, Some(cause.splice)))
}
def debugMessage(c: LoggerContext)(message: c.Expr[String]) =
c.universe.reify {
if (c.prefix.splice.backend.isDebugEnabled)
c.prefix.splice.handler(Debug(message.splice))
}
def debugMessageCause(c: LoggerContext)(message: c.Expr[String], cause: c.Expr[Throwable]) =
c.universe.reify {
if (c.prefix.splice.backend.isDebugEnabled)
c.prefix.splice.handler(Debug(message.splice, Some(cause.splice)))
}
def infoMessage(c: LoggerContext)(message: c.Expr[String]) =
c.universe.reify {
if (c.prefix.splice.backend.isInfoEnabled)
c.prefix.splice.handler(Info(message.splice))
}
def infoMessageCause(c: LoggerContext)(message: c.Expr[String], cause: c.Expr[Throwable]) =
c.universe.reify {
if (c.prefix.splice.backend.isInfoEnabled)
c.prefix.splice.handler(Info(message.splice, Some(cause.splice)))
}
}
object Logger {
def apply[A](implicit A: Manifest[A]): Logger =
apply(LoggerFactory.getLogger(A.runtimeClass))
def apply(name: String): Logger = apply(LoggerFactory.getLogger(name))
def apply(backend: Backend): Logger =
new Logger(backend, {
case Error(message, None) => backend.error(message)
case Error(message, Some(e)) => backend.error(message, e)
case Info(message, None) => backend.info(message)
case Info(message, Some(e)) => backend.info(message, e)
case Warn(message, None) => backend.warn(message)
case Warn(message, Some(e)) => backend.warn(message, e)
case Debug(message, None) => backend.debug(message)
case Debug(message, Some(e)) => backend.debug(message, e)
})
}
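// Illustrative usage sketch (kept as a comment, since the logging methods above are
// macros and cannot be expanded in the same compilation unit that defines them).
// The logger name "example" is an assumption for the example.
//
//   val log = journal.Logger("example")        // or journal.Logger[MyClass]
//   log.info("starting up")
//   log.error("something failed", new RuntimeException("boom"))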
| Verizon/journal | core/src/main/scala/journal/Logger.scala | Scala | apache-2.0 | 4,554 |
package org.ucombinator.jaam.interpreter.snowflakes
import scala.collection.JavaConversions._
import scala.collection.mutable
import soot.{Main => SootMain, Unit => SootUnit, Value => SootValue, _}
import soot.jimple.{Stmt => SootStmt, _}
import org.ucombinator.jaam.util.Log
import org.ucombinator.jaam.interpreter._
import org.ucombinator.jaam.util.{Stmt, Soot}
object DefaultReturnSnowflake {
  // A list containing classes that have already been statically initialized
var initializedClasses = List[SootClass]()
  // A list containing classes that have already been instantiated
var instantiatedClasses = List[SootClass]()
def createArray(t: soot.Type, sizes: List[D], addrs: Set[Addr]) {
val bp = t match {
case at: ArrayType => Snowflakes.malloc(at)
case _ => Snowflakes.malloc(t.toString) //TODO
}
createArray(t, sizes, addrs, bp)
}
def createArray(at: soot.Type, sizes: List[D], addrs: Set[Addr], bp: BasePointer) {
sizes match {
case Nil =>
at match {
case pt: PrimType => System.store.update(addrs, D.atomicTop)
case rt: RefType => DefaultReturnSnowflake.createObject(Some(addrs), Soot.getSootClass(rt.getClassName))
}
case (s :: ss) =>
createArray(at.asInstanceOf[ArrayType].getElementType, ss, Set(ArrayRefAddr(bp)))
System.store.update(addrs, D(Set(ArrayValue(at, bp))))
System.store.update(ArrayLengthAddr(bp), s)
}
}
/*
TODO: createObjectOrThrow vs createObject
def createObjectOrThrow(sootClass: SootClass) : D = {
if (!initializedClasses.contains(sootClass)) {
throw UninitializedSnowflakeObjectException(sootClass)
}
D(Set(ObjectValue(sootClass, Snowflakes.malloc(sootClass))))
}
*/
def initField(addrs: Set[Addr], field: SootField) {
field.getType match {
case pt: PrimType => System.store.update(addrs, D.atomicTop)
case at: ArrayType => createArray(at, List.fill(at.numDimensions)(D.atomicTop), addrs)
case rt: RefType => createObject(Some(addrs), Soot.getSootClass(rt.getClassName))
case t => Log.error("Unknown field type " + t)
}
}
def initStaticFields(sootClass: SootClass) {
if (initializedClasses.contains(sootClass)) return
if (sootClass.hasSuperclass) initStaticFields(sootClass.getSuperclass)
for (field <- sootClass.getFields; if field.isStatic) {
initField(Set(StaticFieldAddr(field)), field)
}
initializedClasses = sootClass::initializedClasses
}
def initInstanceFields(sootClass: SootClass, bp: BasePointer) {
val className = sootClass.getName
for (field <- sootClass.getFields; if !field.isStatic) {
val addrs: Set[Addr] = Set(InstanceFieldAddr(bp, field))
initField(addrs, field)
}
}
  /* DefaultReturnSnowflake.createObject should only be used for instantiating Java library or
     application library classes.
*/
def createObject(destAddr: Option[Set[Addr]], sootClass: SootClass) {
def allSuperClasses(sootClass: SootClass, supers: Set[SootClass]): Set[SootClass] = {
if (sootClass.hasSuperclass) allSuperClasses(sootClass.getSuperclass, supers + sootClass.getSuperclass)
else supers
}
def allInterfaces(sootClass: SootClass): Set[SootClass] = {
val ifs = sootClass.getInterfaces().toSet
ifs.foldLeft(ifs)((acc, interfaceClass) => allInterfaces(interfaceClass)++acc)
}
def allImplementers(sootClass: SootClass): Set[SootClass] = {
Scene.v.getOrMakeFastHierarchy.getAllImplementersOfInterface(sootClass).toSet
}
def allSubclasses(sootClass: SootClass): Set[SootClass] = {
val sub = Scene.v.getOrMakeFastHierarchy.getSubclassesOf(sootClass).toSet
sub.foldLeft(sub)((acc, subclass) => allSubclasses(subclass)++acc)
}
if (!System.isLibraryClass(sootClass)) {
//throw new RuntimeException("Trying to use Snowflake to instantiate a non-library class: " + sootClass.getName + ", abort.")
Log.error("Trying to use Snowflake to instantiate a non-library class: " + sootClass.getName)
//return
}
val objectBP = Snowflakes.malloc(sootClass)
destAddr match {
case Some(addr) => System.store.update(destAddr, D(Set(ObjectValue(sootClass, objectBP))))
case None => {}
}
if (instantiatedClasses.contains(sootClass)) return
instantiatedClasses = sootClass::instantiatedClasses
if (sootClass.isInterface) {
//Log.error("Can not instantiate interface " + sootClass.getName + ".")
val impls = allImplementers(sootClass)
for (impl <- impls) { createObject(destAddr, impl) }
if (impls.isEmpty) {
//Log.error("interface " + sootClass.getName + " has no implementers, continue.")
for (iface <- allInterfaces(sootClass)) {
initStaticFields(iface)
}
initStaticFields(sootClass)
}
return
}
if (!sootClass.isInterface && sootClass.isAbstract) {
val subs = allSubclasses(sootClass)
for (subclass <- subs) { createObject(destAddr, subclass) }
if (subs.nonEmpty) return
//Log.error("abstract class " + sootClass.getName + " has no subclass, continue.")
}
for (superClass <- allSuperClasses(sootClass, Set())) {
initInstanceFields(superClass, objectBP)
initStaticFields(superClass)
}
for (iface <- allInterfaces(sootClass)) {
initStaticFields(iface)
}
initInstanceFields(sootClass, objectBP)
initStaticFields(sootClass)
}
}
object NoOpSnowflake extends SnowflakeHandler {
override def apply(state : State, nextStmt : Stmt, self : Option[Value], args : List[D]) : Set[AbstractState] =
Set(state.copy(stmt = nextStmt))
}
object ReturnAtomicSnowflake extends SnowflakeHandler {
override def apply(state : State, nextStmt : Stmt, self : Option[Value], args : List[D]) : Set[AbstractState] = {
state.stmt.sootStmt match {
case sootStmt : DefinitionStmt => System.store.update(state.addrsOf(sootStmt.getLeftOp()), D.atomicTop)
case sootStmt : InvokeStmt => {}
}
Set(state.copy(stmt = nextStmt))
}
}
case class ReturnObjectSnowflake(name : String) extends SnowflakeHandler {
override def apply(state : State, nextStmt : Stmt, self : Option[Value], args : List[D]) : Set[AbstractState] = {
val addrs: Option[Set[Addr]] = state.stmt.sootStmt match {
case stmt: DefinitionStmt => Some(state.addrsOf(stmt.getLeftOp))
case stmt: InvokeStmt => None
}
DefaultReturnSnowflake.createObject(addrs, Soot.getSootClass(name))
Set[AbstractState](state.copy(stmt = nextStmt))
}
}
case class ReturnArraySnowflake(baseType: String, dim: Int) extends SnowflakeHandler {
override def apply(state : State, nextStmt : Stmt, self : Option[Value], args : List[D]) : Set[AbstractState] = {
val sizes = List.fill(dim)(D.atomicTop)
val sootBaseType = Soot.getSootType(baseType)
val at = soot.ArrayType.v(sootBaseType, dim)
state.stmt.sootStmt match {
case stmt : DefinitionStmt => DefaultReturnSnowflake.createArray(at, sizes, state.addrsOf(stmt.getLeftOp))
case stmt : InvokeStmt =>
}
Set(state.copy(stmt=nextStmt))
}
}
case class DefaultReturnSnowflake(meth : SootMethod) extends SnowflakeHandler {
import DefaultReturnSnowflake._
def typesToDs(types: List[Type]): List[D] = {
def typeToD(ty: Type): D = {
ty match {
case _ : PrimType => D.atomicTop
case at : ArrayType => D(Set(ArrayValue(at, Snowflakes.malloc(at))))
case rt : RefType =>
D(Set(ObjectValue(Soot.getSootClass(rt.getClassName), Snowflakes.malloc(rt.getSootClass))))
}
}
types map typeToD
}
override def apply(state : State, nextStmt : Stmt, self : Option[Value], args : List[D]) : Set[AbstractState] = {
// TODO: options for controlling which parts flow into the global address
for (arg <- args) {
val d = GlobalD.update(arg.getValues)
System.store.strongUpdate(GlobalSnowflakeAddr, d, GlobalD.modified)
}
self match {
case Some(target) =>
val d = GlobalD.update(Set[Value](target))
System.store.strongUpdate(GlobalSnowflakeAddr, d, GlobalD.modified) // TODO: unneeded?
case None => {}
}
val exceptions = for (exception <- meth.getExceptions) yield {
ObjectValue(exception, Snowflakes.malloc(exception))
}
val exceptionStates = (exceptions map {
state.kontStack.handleException(_, state.stmt, state.fp)
}).flatten
val normalStates = meth.getReturnType match {
case _ : VoidType => NoOpSnowflake(state, nextStmt, self, args)
case _ : PrimType =>
// NOTE: if we eventually do something other than D.atomicTop, we need
// to track where in the store our return value comes from
ReturnAtomicSnowflake(state, nextStmt, self, args)
case at : ArrayType =>
val states = ReturnArraySnowflake(at.baseType.toString, at.numDimensions)(state, nextStmt, self, args)
val values = System.store.getOrElseBot(GlobalSnowflakeAddr).getValues
val bp = Snowflakes.malloc(at)
state.stmt.sootStmt match {
case stmt : DefinitionStmt =>
stmt.getLeftOp.getType match {
case leftAt : ArrayType =>
val newValues = values.filter(_ match {
case ArrayValue(at, bp) => Soot.canStoreType(at, leftAt)
case _ => false
})
System.store.update(Set[Addr](ArrayRefAddr(bp)), D(newValues))
case _ => Log.warn("Can not assign an ArrayType value to non-ArrayType. stmt: " + stmt + " meth: " + meth)
}
case _ =>
System.store.update(Set[Addr](ArrayRefAddr(bp)), D(values))
}
states
case rt : RefType =>
val states = ReturnObjectSnowflake(rt.getClassName)(state, nextStmt, self, args)
state.stmt.sootStmt match {
case stmt : DefinitionStmt =>
val parentClass = stmt.getLeftOp.getType match {
case rt : RefType => rt.getSootClass
case _ => throw new RuntimeException("Can not assign a RefType value to non-RefType. stmt: " + stmt + " meth: " + meth)
}
val values: Set[Value] = System.store.getOrElseBot(GlobalSnowflakeAddr).getValues
val newValues = values.filter(_ match {
case ObjectValue(sootClass, bp) => Soot.canStoreClass(sootClass, parentClass)
case _ => false
})
System.store.update(state.addrsOf(stmt.getLeftOp), D(newValues))
case _ =>
}
states
}
// If the argument type is an interface or abstract class, then we try to call
// each method from the definition of interface/abstract class.
// TODO: options to control saturation
// TODO: log what objects are being saturated
val methodsOfArgs = (for {
(arg, ty) <- args zip meth.getParameterTypes if ty.isInstanceOf[RefType];
sootClass = ty.asInstanceOf[RefType].getSootClass;
if (sootClass.isInterface || sootClass.isAbstract) && System.isLibraryClass(sootClass)
} yield {
val newValues = arg.getValues.filter(_ match {
case ObjectValue(objClass, bp) =>
!System.isLibraryClass(objClass) && Soot.canStoreClass(objClass, sootClass)
case _ => false
})
(D(newValues), sootClass.getMethods) //TODO: maybe not include <init>?
})
//println("methodsOfArgs: " + methodsOfArgs)
Log.warn("Saturating due to: "+meth)
val methStates = (for {
(base, meths) <- methodsOfArgs
if base.getValues.nonEmpty
meth <- meths
} yield {
Log.warn("Saturating: "+base+" meth: "+meth)
val params = typesToDs(meth.getParameterTypes.toList)
state.handleInvoke2(Some((base, false)), meth, params, ZeroCFAFramePointer(meth), None, nextStmt)
}).flatten
///////////////////////////////
normalStates ++ exceptionStates ++ methStates
}
}
| Ucombinator/jaam | src/main/scala/org/ucombinator/jaam/interpreter/snowflakes/DefaultSnowflakes.scala | Scala | bsd-2-clause | 11,939 |
package libref.proof
import libref.collection._
import libref.proof.MinOps._
import libref.proof.SortedListOps._
import leon.lang._
import scala.language.postfixOps
import scala.language.implicitConversions
object SetLemmas {
// implicit def setAsList[A] (set: Set[A]): List[A] = choose {
// (x: List[A]) => set == x.content
// }
def set_min (set1 : Set[BigInt], set2 : Set[BigInt]) : Boolean = {
require(set1 == set2 && !set1.isEmpty)
min(set1.toList) == min(set2.toList)
} holds
def set_forall[A] (set1 : Set[A], set2 : Set[A], p : A => Boolean) : Boolean = {
require(set1 == set2)
set1.forall(p) == set2.forall(p)
} holds
def set_exists[A] (set1 : Set[A], set2 : Set[A], p : A => Boolean) : Boolean = {
require(set1 == set2)
set1.exists(p) == set2.exists(p)
} holds
def set_filter[A] (set1 : Set[A], set2 : Set[A], p : A => Boolean) : Boolean = {
require(set1 == set2 && !set1.isEmpty)
set1.filter(p) == set2.filter(p)
} holds
def set_sort (set1 : Set[BigInt], set2 : Set[BigInt]) : Boolean = {
require(set1 == set2 && set1.size == set2.size)
sort(set1.toList) == sort(set2.toList)
} holds
}
| fmlab-iis/LibRef | proof/set.scala | Scala | gpl-3.0 | 1,177 |
object ListC {
class T1[A]
class T2[A]
implicit def getT[A]: T1[A] with T2[A] = null
}
class Test {
import ListC._
def foo[T[_], A](implicit a: T[A]): T[A] = a
val bar: T1[Int] = /*start*/foo/*end*/
}
//ListC.T1[Int] | ilinum/intellij-scala | testdata/typeInference/higherKinds/SCL10857_1.scala | Scala | apache-2.0 | 229 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts
import com.cloudera.sparkts.DateTimeIndex._
import java.time._
import java.time.temporal._
class BusinessDayRichInt(n: Int, firstDayOfWeek: Int = DayOfWeek.MONDAY.getValue) {
def businessDays: BusinessDayFrequency = new BusinessDayFrequency(n, firstDayOfWeek)
}
/**
* A frequency for a uniform index.
*/
trait Frequency extends Serializable {
/**
* Advances the given DateTime by this frequency n times.
*/
def advance(dt: ZonedDateTime, n: Int): ZonedDateTime
/**
* The number of times this frequency occurs between the two DateTimes, rounded down.
*/
def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int
}
class DurationFrequency(val duration: Duration) extends Frequency {
val durationNanos = duration.getSeconds * 1000000000L + duration.getNano
def advance(dt: ZonedDateTime, n: Int): ZonedDateTime = dt.plus(duration.multipliedBy(n))
override def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int = {
val between = Duration.between(dt1, dt2)
val betweenNanos = between.getSeconds * 1000000000L + between.getNano
(betweenNanos / durationNanos).toInt
}
override def equals(other: Any): Boolean = {
other match {
case frequency: DurationFrequency => frequency.duration == duration
case _ => false
}
}
override def hashCode(): Int = {
duration.hashCode()
}
}
abstract class PeriodFrequency(val period: Period) extends Frequency {
def advance(dt: ZonedDateTime, n: Int): ZonedDateTime = dt.plus(period.multipliedBy(n))
override def equals(other: Any): Boolean = {
other match {
case frequency: PeriodFrequency => frequency.period == period
case _ => false
}
}
override def hashCode(): Int = {
period.hashCode()
}
}
class MillisecondFrequency(val ms: Int)
extends DurationFrequency(ChronoUnit.MILLIS.getDuration.multipliedBy(ms)) {
override def toString: String = s"milliseconds $ms"
}
class MicrosecondFrequency(val us: Int)
extends DurationFrequency(ChronoUnit.MICROS.getDuration.multipliedBy(us)) {
override def toString: String = s"microseconds $us"
}
class MonthFrequency(val months: Int)
extends PeriodFrequency(Period.ofMonths(months)) {
override def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int = {
val diffMonths = ChronoUnit.MONTHS.between(dt1.toLocalDate, dt2.toLocalDate)
(diffMonths / months).toInt
}
override def toString: String = s"months $months"
}
class YearFrequency(val years: Int)
extends PeriodFrequency(Period.ofYears(years)) {
override def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int = {
val diffYears = ChronoUnit.YEARS.between(dt1.toLocalDate, dt2.toLocalDate)
(diffYears / years).toInt
}
override def toString: String = s"years $years"
}
class DayFrequency(val days: Int)
extends PeriodFrequency(Period.ofDays(days)) {
override def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int = {
val diffDays = ChronoUnit.DAYS.between(dt1, dt2)
(diffDays / days).toInt
}
override def toString: String = s"days $days"
}
class HourFrequency(val hours: Int)
extends DurationFrequency(ChronoUnit.HOURS.getDuration.multipliedBy(hours)) {
override def toString: String = s"hours $hours"
}
class MinuteFrequency(val minutes: Int)
extends DurationFrequency(ChronoUnit.MINUTES.getDuration.multipliedBy(minutes)) {
override def toString: String = s"minutes $minutes"
}
class SecondFrequency(val seconds: Int)
extends DurationFrequency(ChronoUnit.SECONDS.getDuration.multipliedBy(seconds)) {
override def toString: String = s"seconds $seconds"
}
class BusinessDayFrequency(
val days: Int,
val firstDayOfWeek: Int = DayOfWeek.MONDAY.getValue)
extends Frequency {
/**
* Advances the given DateTime by (n * days) business days.
*/
def advance(dt: ZonedDateTime, n: Int): ZonedDateTime = {
val dayOfWeek = dt.getDayOfWeek
val alignedDayOfWeek = rebaseDayOfWeek(dayOfWeek.getValue, firstDayOfWeek)
if (alignedDayOfWeek > 5) {
throw new IllegalArgumentException(s"$dt is not a business day")
}
val totalDays = n * days
val standardWeekendDays = (totalDays / 5) * 2
val remaining = totalDays % 5
val extraWeekendDays = if (alignedDayOfWeek + remaining > 5) 2 else 0
dt.plusDays(totalDays + standardWeekendDays + extraWeekendDays)
}
def difference(dt1: ZonedDateTime, dt2: ZonedDateTime): Int = {
if (dt2.isBefore(dt1)) {
return -difference(dt2, dt1)
}
val daysBetween = ChronoUnit.DAYS.between(dt1, dt2)
val dayOfWeek1 = dt1.getDayOfWeek
val alignedDayOfWeek1 = rebaseDayOfWeek(dayOfWeek1.getValue, firstDayOfWeek)
if (alignedDayOfWeek1 > 5) {
throw new IllegalArgumentException(s"$dt1 is not a business day")
}
val standardWeekendDays = (daysBetween / 7) * 2
val remaining = daysBetween % 7
val extraWeekendDays = if (alignedDayOfWeek1 + remaining > 5) 2 else 0
((daysBetween - standardWeekendDays - extraWeekendDays) / days).toInt
}
override def equals(other: Any): Boolean = {
other match {
case frequency: BusinessDayFrequency => frequency.days == days
case _ => false
}
}
override def hashCode(): Int = days
override def toString: String = s"businessDays $days firstDayOfWeek $firstDayOfWeek"
}
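// Illustrative sketch (not part of the library): advancing one business day from a
// Friday skips the weekend. The concrete date and zone below are assumptions for the
// example, chosen so that 2015-01-02 falls on a Friday.
private object BusinessDayFrequencyExample {
  def mondayAfter(): ZonedDateTime = {
    val friday = ZonedDateTime.of(2015, 1, 2, 0, 0, 0, 0, ZoneOffset.UTC)
    // advance(friday, 1) adds 1 business day plus the 2 weekend days -> Monday 2015-01-05
    new BusinessDayFrequency(1).advance(friday, 1)
  }
}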
| cloudera/spark-timeseries | src/main/scala/com/cloudera/sparkts/Frequency.scala | Scala | apache-2.0 | 5,904 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import java.io.FileNotFoundException
import Matchers._
import exceptions.TestFailedException
class ShorthandShouldNotBeThrownBySpec extends Spec {
val fileName: String = "ShorthandShouldNotBeThrownBySpec.scala"
def exceptionNotExpected(clz: Class[_]): String =
Resources.exceptionNotExpected(clz.getName)
def exceptionExpected(clz: Class[_]): String =
Resources.exceptionExpected(clz.getName)
def hadExpectedMessage(left: Throwable, expectedMessage: String): String =
FailureMessages.hadExpectedMessage(left, expectedMessage)
object `the [Exception] 'should not have message' syntax should` {
def `do nothing when 'should have message' exception's message not equal expected` {
the [FileNotFoundException] thrownBy {
throw new FileNotFoundException("purposely")
} shouldNot have message ("accidentally")
}
def `throw new TestFailedException with correct message and stack depth when used with 'should not have message' and provided code produced exception that has message equal to expected` {
val fnfe =
the [FileNotFoundException] thrownBy {
throw new FileNotFoundException("purposely")
}
val e = intercept[TestFailedException] {
fnfe shouldNot have message ("purposely")
}
assert(e.message === Some(hadExpectedMessage(fnfe, "purposely")))
assert(e.failedCodeFileName === Some(fileName))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 4))
}
def `throw new TestFailedException with correct message and stack depth when used with 'should not have message' and provided code does not produce any exception` {
val e = intercept[TestFailedException] {
the [RuntimeException] thrownBy {
assert(1 === 1)
} shouldNot have message ("purposely")
}
assert(e.message === Some(exceptionExpected(classOf[RuntimeException])))
assert(e.failedCodeFileName === Some(fileName))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 6))
}
}
}
| rahulkavale/scalatest | scalatest-test/src/test/scala/org/scalatest/ShorthandShouldNotBeThrownBySpec.scala | Scala | apache-2.0 | 2,731 |
package example
import scala.annotation.tailrec
object Lists {
/**
* This method computes the sum of all elements in the list xs. There are
* multiple techniques that can be used for implementing this method, and
* you will learn during the class.
*
* For this example assignment you can use the following methods in class
* `List`:
*
* - `xs.isEmpty: Boolean` returns `true` if the list `xs` is empty
* - `xs.head: Int` returns the head element of the list `xs`. If the list
* is empty an exception is thrown
   * - `xs.tail: List[Int]` returns the tail of the list `xs`, i.e. the
* list `xs` without its `head` element
*
* ''Hint:'' instead of writing a `for` or `while` loop, think of a recursive
* solution.
*
* @param xs A list of natural numbers
* @return The sum of all elements in `xs`
*/
def sum(xs: List[Int]): Int = {
@tailrec
def sumRec(xs: List[Int], acc: Int): Int = {
if (xs.isEmpty) acc
else sumRec(xs.tail, acc + xs.head)
}
sumRec(xs.tail, xs.head)
}
/**
* This method returns the largest element in a list of integers. If the
* list `xs` is empty it throws a `java.util.NoSuchElementException`.
*
* You can use the same methods of the class `List` as mentioned above.
*
* ''Hint:'' Again, think of a recursive solution instead of using looping
* constructs. You might need to define an auxiliary method.
*
* @param xs A list of natural numbers
* @return The largest element in `xs`
* @throws java.util.NoSuchElementException if `xs` is an empty list
*/
def max(xs: List[Int]): Int = {
@tailrec
def maxRec(xs: List[Int], max: Int): Int = {
if (xs.isEmpty) max
else maxRec(xs.tail, if (xs.head > max) xs.head else max)
}
maxRec(xs.tail, xs.head)
}
}
| masipauskas/coursera-scala | progfun2/example/src/main/scala/example/Lists.scala | Scala | unlicense | 1,882 |
package no.uio.musit.functional
import no.uio.musit.MusitResults.{
MusitError,
MusitResult,
MusitSuccess,
MusitValidationError
}
import scala.concurrent.{ExecutionContext, Future}
/* Some common functional signatures:
Assume M is a typical structure having the given operation, the "elevated" world.
(sometimes Monad is required, sometimes Applicative or Functor or something else)
map[A, B] M[A], f: A=>B : M[B]
filter[T] M[T], f: T=>Bool : M[T]
fold[T] M[T] : T
flatten[T] M[M[T]] : M[T]
flatMap[A, B] M[A], f: A=>M[B] : M[B]
sequence[T] Seq[M[T]] : M[Seq[T]]
traverse[A,B] Seq[A], f: A=> M[B] : M[Seq[B]]
Some FutureMusitResult (FMR) and MusitResult (MR) signatures:
(Better names are welcomed!)
mapAndFlattenMusitResult[A,B] FMR[A], f: A=MR[B] : FMR[B]
pairWise[A,B] Seq[A], f: A => FMR[Option[B]] : FMR[Seq[(A, Option[B])]]
collectAllOrFail[A, B] Seq[A], f: A => FMR[Option[B]], Seq[A] => MusitError : FMR[Seq[B]]
getOrError FMR[Option[T]], => MusitError : FMR[T]
flatMapInsideOption[A,B] FMR[Option[A]], f: A=>FMR[B] : FRM[Option[B]]
*/
case class FutureMusitResult[A](value: Future[MusitResult[A]]) {
def map[B](f: A => B)(implicit ec: ExecutionContext): FutureMusitResult[B] = {
val res = value.map(_.map(f))
FutureMusitResult(res)
}
def flatMap[B](f: A => FutureMusitResult[B])(implicit ec: ExecutionContext) = {
val res = value.flatMap { musitResult =>
musitResult match {
case MusitSuccess(t) => f(t).value
case err: MusitError => Future.successful(err)
}
}
FutureMusitResult(res)
}
def mapAndFlattenMusitResult[B](
f: A => MusitResult[B]
)(implicit ec: ExecutionContext): FutureMusitResult[B] = {
val temp = map(f).value
val res = temp.map(mrMr => mrMr.flatten)
FutureMusitResult(res)
}
}
object Extensions {
implicit class FutMrOption[T](val value: FutureMusitResult[Option[T]]) extends AnyVal {
def getOrError(
error: => MusitError
)(implicit ec: ExecutionContext): FutureMusitResult[T] = {
value.mapAndFlattenMusitResult { opt =>
opt.fold[MusitResult[T]](error)(t => MusitSuccess(t))
}
}
def flatMapInsideOption[B](
f: T => FutureMusitResult[B]
)(implicit ec: ExecutionContext): FutureMusitResult[Option[B]] = {
value.flatMap[Option[B]] {
case Some(v) => {
f(v).map(Some(_))
}
case None => {
FutureMusitResult.successful[Option[B]](None)
}
}
}
}
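  // Illustrative sketch (not from the original code): `getOrError` lifts a missing
  // Option into a MusitError. The validation message here is an assumption.
  private def getOrErrorSketch(
      lookup: FutureMusitResult[Option[String]]
  )(implicit ec: ExecutionContext): FutureMusitResult[String] =
    lookup.getOrError(MusitValidationError("object not found"))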
implicit class FutureExtensions[T](val value: Future[T]) extends AnyVal {
def toMusitFuture()(implicit ec: ExecutionContext): FutureMusitResult[T] = {
val temp = FutureMusitResult(value.map(MusitSuccess[T](_)))
temp
}
}
}
object FutureMusitResult {
def successful[A](
a: A
): FutureMusitResult[A] = {
FutureMusitResult(Future.successful(MusitSuccess(a)))
}
def failed[A](
err: MusitError
): FutureMusitResult[A] = {
FutureMusitResult(Future.successful[MusitResult[A]](err))
}
def from[A](mr: MusitResult[A]) = FutureMusitResult(Future.successful(mr))
def from[A](a: A) = successful(a)
def from[A](futA: Future[A])(implicit ec: ExecutionContext) =
    FutureMusitResult(futA.map(a => MusitSuccess(a))) // Is this correct,
  // or should we also recover and, on failure, return something other than MusitSuccess?
def requireFromClient(test: Boolean, errorMsg: String): FutureMusitResult[Unit] = {
if (test) {
successful(())
} else {
failed(MusitValidationError(errorMsg))
}
}
def flatten[A](
futMrFutMr: FutureMusitResult[FutureMusitResult[A]]
)(implicit ec: ExecutionContext) = {
futMrFutMr.flatMap(identity)
}
def sequence[A](seqFutMr: Seq[FutureMusitResult[A]])(
implicit ec: ExecutionContext
): FutureMusitResult[Seq[A]] = {
FutureMusitResult(
Future.sequence(seqFutMr.map(_.value)).map { results =>
MusitResult.sequence(results)
}
)
}
def traverse[A, B](
seqA: Seq[A],
f: A => FutureMusitResult[B]
)(implicit ec: ExecutionContext): FutureMusitResult[Seq[B]] = {
val res = seqA.map(f)
sequence(res)
}
// Some less standard helpers
  /** Given a list of something, e.g. IDs, and a function that takes such an id and returns a
    * FutureMusitResult[Option[T]] (typically a database call like findById, getById etc.), returns a list of pairs of the input and the optional result.
    * Note that we fail if one (or more) of the calls to f returns a MusitError. All the calls are run regardless, so the errors get accumulated.
*/
def pairwise[A, B](
seq: Seq[A],
f: A => FutureMusitResult[Option[B]]
)(implicit ec: ExecutionContext): FutureMusitResult[Seq[(A, Option[B])]] = {
def makePair(a: A): FutureMusitResult[(A, Option[B])] = f(a).map(optB => (a, optB))
traverse(seq, makePair)
}
  /** Given a list of something, e.g. IDs, and a function that takes such an id and returns a
    * FutureMusitResult[Option[T]] (typically a database call like findById, getById etc.), returns a list of those that were found.
    * Note that we fail if one (or more) of the calls to f returns a MusitError. All the calls are run regardless, so the errors get accumulated.
    * We also fail if not all of the objects are found!
*/
def collectAllOrFail[A, B](
seqA: Seq[A],
f: A => FutureMusitResult[Option[B]],
errorFactory: Seq[A] => MusitError
)(implicit ec: ExecutionContext): FutureMusitResult[Seq[B]] = {
val pairs = pairwise(seqA, f)
pairs.mapAndFlattenMusitResult { seqOfPairs =>
val asWithoutBs = seqOfPairs.collect { case (a, None) => a }
if (asWithoutBs.isEmpty) {
MusitSuccess(seqOfPairs.collect { case (_, Some(b)) => b })
} else errorFactory(asWithoutBs)
}
}
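  // Illustrative sketch (not from the original code): combining `collectAllOrFail`
  // with a lookup function. The `lookupName` parameter and the error message are
  // assumptions for the example.
  private def collectAllOrFailSketch(
      ids: Seq[Long],
      lookupName: Long => FutureMusitResult[Option[String]]
  )(implicit ec: ExecutionContext): FutureMusitResult[Seq[String]] =
    collectAllOrFail(
      ids,
      lookupName,
      (missing: Seq[Long]) => MusitValidationError(s"No objects found for ids: ${missing.mkString(", ")}")
    )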
}
| MUSIT-Norway/musit | musit-models/src/main/scala/no/uio/musit/functional/FutureMusitResult.scala | Scala | gpl-2.0 | 6,030 |
package com.blogspot.ramannanda.scala.algorithms.cp3.adhoc.anagrams
import scala.collection.mutable.ListBuffer
import scala.io.StdIn
//uva 00195
object Permutations {
def permute(word: String): Seq[String] = {
def permuteRec(prefix: String, word: String): Seq[String] = {
if (word.length == 1) {
Seq(prefix + word)
}
else {
var lb = ListBuffer[String]()
for (i <- 0 until word.length) {
lb ++= permuteRec(prefix + word.charAt(i),
word.substring(0, i) + word.substring(i + 1, word.length))
}
lb
}
}
permuteRec("", word)
}
def main(args: Array[String]): Unit = {
val cases = StdIn.readLine().trim.toInt
for (i <- 0 until cases) {
val word = StdIn.readLine()
permute(word).sorted.toSet.foreach(println)
}
}
}
| ramannanda9/algorithms-in-scala | src/main/scala/com/blogspot/ramannanda/scala/algorithms/cp3/adhoc/anagrams/Permutations.scala | Scala | gpl-3.0 | 841 |
package views.html
package auth
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import controllers.routes
object signup {
def apply(form: lila.security.HcaptchaForm[_])(implicit ctx: Context) =
views.html.base.layout(
title = trans.signUp.txt(),
moreJs = frag(
jsModule("login"),
embedJsUnsafeLoadThen("""loginSignup.signupStart()"""),
views.html.base.hcaptcha.script(form),
fingerprintTag
),
moreCss = cssTag("auth"),
csp = defaultCsp.withHcaptcha.some
) {
main(cls := "auth auth-signup box box-pad")(
h1(trans.signUp()),
postForm(
id := "signup-form",
cls := List(
"form3" -> true,
"h-captcha-enabled" -> form.config.enabled
),
action := routes.Auth.signupPost
)(
auth.bits.formFields(form("username"), form("password"), form("email").some, register = true),
input(id := "signup-fp-input", name := "fp", tpe := "hidden"),
div(cls := "form-group text", dataIcon := "")(
trans.computersAreNotAllowedToPlay(),
br,
small(
trans.byRegisteringYouAgreeToBeBoundByOur(a(href := routes.Page.tos)(trans.termsOfService())),
br,
trans.readAboutOur(a(href := routes.Page.menuBookmark("privacy"))(trans.privacyPolicy())),
br
)
),
agreement(form("agreement"), form.form.errors.exists(_.key startsWith "agreement.")),
views.html.base.hcaptcha.tag(form),
button(cls := "submit button text big")(trans.signUp())
)
)
}
private def agreement(form: play.api.data.Field, error: Boolean)(implicit ctx: Context) =
div(cls := "agreement")(
error option p(
strong(cls := "error")(
"You must agree to the Lichess policies listed below:"
)
),
agreements.map { case (field, i18n) =>
form3.checkbox(form(field), i18n())
}
)
private val agreements = List(
"assistance" -> trans.agreementAssistance,
"nice" -> trans.agreementNice,
"account" -> trans.agreementAccount,
"policy" -> trans.agreementPolicy
)
}
| luanlv/lila | app/views/auth/signup.scala | Scala | mit | 2,312 |
/**
* scala-relaxng
* For all details and documentation:
* http://github.com/inkling/scala-relaxng
*
* Copyright 2011 Inkling Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.inkling.relaxng.test
import com.inkling.relaxng.AST._
import com.inkling.relaxng.ArbitraryInstances._
import org.scalatest._
import org.scalatest.prop._
import org.scalacheck.{Gen, Prop}
import org.scalacheck.Gen._
import org.scalacheck.Prop._
import org.scalacheck.Arbitrary._
class ArbitraryInstancesSpec extends Spec with Checkers {
def checkit(description: String)(prop: =>Prop) {
it(description) {
check(prop)
}
}
def nonFailing[T](g: Gen[T]) : Prop = noneFailing(Seq(g))
describe("The RelaxNg Compact Syntax Arbitrary instances") {
describe("Should all be non-failing (checking for excess recursion)") {
checkit("unary operators") { nonFailing(arbitrary[UnOp]) }
checkit("binary operators") { nonFailing(arbitrary[BinOp]) }
checkit("non-colon names") { nonFailing(arbitrary[NCName]) }
checkit("colon names") { nonFailing(arbitrary[CName]) }
checkit("name classes") { nonFailing(arbitrary[NameClass]) }
checkit("patterns") { nonFailing(arbitrary[Pattern]) }
checkit("declarations") { nonFailing(arbitrary[Declaration]) }
checkit("grammar content") { nonFailing(arbitrary[GrammarContent]) }
checkit("schema") { nonFailing(arbitrary[Schema]) }
}
}
}
| inkling/scala-relaxng | src/test/scala/ArbitraryInstancesSpec.scala | Scala | apache-2.0 | 2,000 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate
import java.lang.Iterable
import org.apache.flink.api.common.functions.RichGroupReduceFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.codegen.{Compiler, GeneratedAggregationsFunction}
import org.apache.flink.table.util.Logging
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
/**
* It wraps the aggregate logic inside of
* [[org.apache.flink.api.java.operators.GroupReduceOperator]].
* It is only used for tumbling count-window on batch.
*
* @param genAggregations Code-generated [[GeneratedAggregations]]
* @param windowSize Tumble count window size
*/
class DataSetTumbleCountWindowAggReduceGroupFunction(
private val genAggregations: GeneratedAggregationsFunction,
private val windowSize: Long)
extends RichGroupReduceFunction[Row, Row]
with Compiler[GeneratedAggregations]
with Logging {
private var output: Row = _
private var accumulators: Row = _
private var function: GeneratedAggregations = _
override def open(config: Configuration) {
LOG.debug(s"Compiling AggregateHelper: $genAggregations.name \\n\\n " +
s"Code:\\n$genAggregations.code")
val clazz = compile(
getRuntimeContext.getUserCodeClassLoader,
genAggregations.name,
genAggregations.code)
LOG.debug("Instantiating AggregateHelper.")
function = clazz.newInstance()
output = function.createOutputRow()
accumulators = function.createAccumulators()
}
override def reduce(records: Iterable[Row], out: Collector[Row]): Unit = {
var count: Long = 0
val iterator = records.iterator()
while (iterator.hasNext) {
if (count == 0) {
function.resetAccumulator(accumulators)
}
val record = iterator.next()
count += 1
accumulators = function.mergeAccumulatorsPair(accumulators, record)
if (windowSize == count) {
// set group keys value to final output.
function.setForwardedFields(record, output)
function.setAggregationResults(accumulators, output)
// emit the output
out.collect(output)
count = 0
}
}
}
}
| tzulitai/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/DataSetTumbleCountWindowAggReduceGroupFunction.scala | Scala | apache-2.0 | 3,021 |
package pl.touk.nussknacker.engine.avro.schemaregistry.confluent.client
import com.typesafe.scalalogging.LazyLogging
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException
import io.confluent.kafka.schemaregistry.client.{SchemaMetadata, MockSchemaRegistryClient => CMockSchemaRegistryClient}
import java.util
/**
  * Extended Confluent MockSchemaRegistryClient - the base one throws the wrong exceptions when a version or subject doesn't exist
*/
class MockSchemaRegistryClient extends CMockSchemaRegistryClient with LazyLogging {
import ConfluentSchemaRegistryClient._
override def getSchemaMetadata(subject: String, version: Int): SchemaMetadata = {
verify(subject, Some(version))
super.getSchemaMetadata(subject, version)
}
override def getLatestSchemaMetadata(subject: String): SchemaMetadata = {
verify(subject, None)
super.getLatestSchemaMetadata(subject)
}
override def getAllVersions(subject: String): util.List[Integer] = {
verify(subject, None)
super.getAllVersions(subject)
}
/**
   * MockSchemaRegistryClient doesn't throw the right exception if the subject or version doesn't exist
*/
private def verify(subject: String, version: Option[Int]): Unit = {
if (!getAllSubjects.contains(subject)) {
throw new RestClientException("Subject not found", 404, subjectNotFoundCode)
}
if (!version.forall(getAllVersions(subject).contains(_))) {
throw new RestClientException("Version not found", 404, versionNotFoundCode)
}
}
}
| TouK/nussknacker | utils/avro-components-utils/src/main/scala/pl/touk/nussknacker/engine/avro/schemaregistry/confluent/client/MockSchemaRegistryClient.scala | Scala | apache-2.0 | 1,529 |
package chandu0101.scalajs.rn.apis
import scala.scalajs.js
trait NetInfo extends js.Object{
val isConnected : NetInfoIsConnected = js.native
val reachabilityIOS : NetInfoReachabilityIOS = js.native
}
trait NetInfoIsConnected extends js.Object{
def addEventListener(eventName : String,handler : (Boolean) => _) : Unit = js.native
def removeEventListener(eventName : String,handler : (Boolean) => _) : Unit = js.native
def fetch() : js.Dynamic = js.native
}
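// Illustrative usage sketch (kept as a comment): subscribing to connectivity changes,
// assuming a `netInfo: NetInfo` instance obtained from the React Native APIs and that
// "change" is the event name expected by the underlying JS module (an assumption).
//
//   netInfo.isConnected.addEventListener("change", (connected: Boolean) => println(connected))
//   netInfo.isConnected.fetch()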
trait NetInfoReachabilityIOS extends js.Object{
def addEventListener(eventName : String,handler : (String) => _) : Unit = js.native
def removeEventListener(eventName : String,handler : (String) => _) : Unit = js.native
def fetch() : js.Dynamic = js.native
} | beni55/scalajs-react-native | core/src/main/scala/chandu0101/scalajs/rn/apis/NetInfo.scala | Scala | apache-2.0 | 739 |
package ammonite.session
import ammonite.TestUtils._
import ammonite.TestRepl
import ammonite.util.{Res, Util}
import utest._
object AdvancedTests extends TestSuite{
val tests = TestSuite{
println("AdvancedTests")
val check = new TestRepl()
'pprint{
check.session(s"""
@ Seq.fill(10)(Seq.fill(3)("Foo"))
res0: Seq[Seq[String]] = List(
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo"),
List("Foo", "Foo", "Foo")
)
@ case class Foo(i: Int, s0: String, s1: Seq[String])
defined class Foo
@ Foo(1, "", Nil)
res2: ${sessionPrefix}Foo = Foo(1, "", List())
@ Foo(
@ 1234567,
@ "I am a cow, hear me moo",
@ Seq("I weigh twice as much as you", "and I look good on the barbecue")
@ )
res3: ${sessionPrefix}Foo = Foo(
1234567,
"I am a cow, hear me moo",
List("I weigh twice as much as you", "and I look good on the barbecue")
)
""")
}
'exit{
check.result("exit", Res.Exit())
}
'skip{
check.result("", Res.Skip)
}
'predef{
val check2 = new TestRepl{
override def predef = (
"""
import math.abs
val x = 1
val y = "2"
""",
None
)
}
check2.session("""
@ -x
res0: Int = -1
@ y
res1: String = "2"
@ x + y
res2: String = "12"
@ abs(-x)
res3: Int = 1
""")
}
'predefSettings{
val check2 = new TestRepl{
override def predef = (
"""
interp.configureCompiler(_.settings.Xexperimental.value = true)
""",
None
)
}
check2.session("""
@ repl.compiler.settings.Xexperimental.value
res0: Boolean = true
""")
}
// 'macros{
// check.session("""
// @ import language.experimental.macros
//
// @ import reflect.macros.Context
//
// @ def impl(c: Context): c.Expr[String] = {
// @ import c.universe._
// @ c.Expr[String](Literal(Constant("Hello!")))
// @ }
// defined function impl
//
// @ def m: String = macro impl
// defined function m
//
// @ m
// res4: String = "Hello!"
// """)
// }
'typeScope{
// Fancy type-printing isn't implemented at all in 2.10.x
if (!scala2_10) check.session("""
@ collection.mutable.Buffer(1)
res0: collection.mutable.Buffer[Int] = ArrayBuffer(1)
@ import collection.mutable
@ collection.mutable.Buffer(1)
res2: mutable.Buffer[Int] = ArrayBuffer(1)
@ mutable.Buffer(1)
res3: mutable.Buffer[Int] = ArrayBuffer(1)
@ import collection.mutable.Buffer
@ mutable.Buffer(1)
res5: Buffer[Int] = ArrayBuffer(1)
""")
}
'customTypePrinter{
check.session("""
@ Array(1)
res0: Array[Int] = Array(1)
@ import pprint.TPrint
@ implicit def ArrayTPrint[T: TPrint]: TPrint[Array[T]] = TPrint.lambda( c =>
@ implicitly[TPrint[T]].render(c) +
@ " " +
@ c.typeColor("Array").render
@ )
@ Array(1)
res3: Int Array = Array(1)
""")
}
'unwrapping{
check.session("""
@ {
@ val x = 1
@ val y = 2
@ x + y
@ }
x: Int = 1
y: Int = 2
res0_2: Int = 3
""")
}
'forceWrapping{
check.session("""
@ {{
@ val x = 1
@ val y = 2
@ x + y
@ }}
res0: Int = 3
""")
}
'truncation {
// Need a way to capture stdout in tests to make these tests work
if(false) check.session("""
@ Seq.fill(20)(100)
res0: Seq[Int] = List(
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
...
@ show(Seq.fill(20)(100))
res1: ammonite.pprint.Show[Seq[Int]] = List(
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
100
)
@ show(Seq.fill(20)(100), height = 3)
res2: ammonite.pprint.Show[Seq[Int]] = List(
100,
100,
...
@ pprintConfig() = pprintConfig().copy(height = 5 )
@ Seq.fill(20)(100)
res4: Seq[Int] = List(
100,
100,
100,
100,
...
""")
}
/* - issues with this and class-wrapping
'private{
check.session("""
@ private val x = 1; val y = x + 1
x: Int = 1
y: Int = 2
@ y
res1: Int = 2
@ x
error: not found: value x
@ {
@ private[this] val a = 3
@ val b = a * 4
@ }
@ a
error: not found: value a
@ b
""")
}
*/
'compilerPlugin - retry(3){
if (!scala2_12) check.session("""
@ // Compiler plugins imported without `.$plugin` are not loaded
@ import $ivy.`org.spire-math::kind-projector:0.6.3`
@ trait TC0[F[_]]
defined trait TC0
@ type TC0EitherStr = TC0[Either[String, ?]]
error: not found: type ?
@ // You need to use `import $ivy.$plugin`
@ import $plugin.$ivy.`org.spire-math::kind-projector:0.6.3`
@ trait TC[F[_]]
defined trait TC
@ type TCEitherStr = TC[Either[String, ?]]
defined type TCEitherStr
@ // Importing plugins doesn't affect the run-time classpath
@ import $plugin.$ivy.`com.lihaoyi::scalatags:0.6.2`
@ import scalatags.Text
error: not found: value scalatags
""")
}
'replApiUniqueness{
// Make sure we can instantiate multiple copies of Interpreter, with each
// one getting its own `ReplBridge`. This ensures that the various
// Interpreters are properly encapsulated and don't interfere with each
// other.
val c1 = new TestRepl()
val c2 = new TestRepl()
c1.session("""
@ repl.prompt() = "A"
""")
c2.session("""
@ repl.prompt() = "B"
""")
c1.session("""
@ assert(repl.prompt() == "A")
""")
c2.session("""
@ assert(repl.prompt() == "B")
""")
}
'macroParadiseWorks{
val scalaVersion: String = scala.util.Properties.versionNumberString
val c1: TestRepl = new TestRepl()
c1.session(s"""
@ interp.load.plugin.ivy("org.scalamacros" % "paradise_${scalaVersion}" % "2.1.0")
""")
c1.session("""
@ val x = 1
""")
}
'desugar{
if (!scala2_10) check.session("""
@ desugar{1 + 2 max 3}
res0: Desugared = scala.Predef.intWrapper(3).max(3)
""")
}
'loadingModulesInPredef{
import ammonite.ops._
val dir = pwd/'amm/'src/'test/'resources/'scripts/'predefWithLoad
'loadExec {
val c1 = new TestRepl() {
override def predef = (
read! dir/"PredefLoadExec.sc",
Some(dir/"PredefLoadExec.sc")
)
}
c1.session("""
@ val previouslyLoaded = predefDefinedValue
previouslyLoaded: Int = 1337
""")
}
'loadModule{
val c2 = new TestRepl(){
override def predef = (
read! dir/"PredefLoadModule.sc",
Some(dir/"PredefLoadModule.sc")
)
}
c2.session("""
@ val previouslyLoaded = predefDefinedValue
previouslyLoaded: Int = 1337
""")
}
'importIvy{
val c2 = new TestRepl(){
override def predef = (
read! dir/"PredefMagicImport.sc",
Some(dir/"PredefMagicImport.sc")
)
}
c2.session("""
@ val previouslyLoaded = predefDefinedValue
previouslyLoaded: Int = 1337
@ val loadedDirect = Loaded.loadedDefinedValue
loadedDirect: Int = 1337
""")
}
}
'bytecodeForReplClasses{
check.session("""
@ case class Child(name: String)
@ val cls = classOf[Child]
@ val resName = cls.getName.replace('.', '/') + ".class"
resName: String = "$sess/cmd0Wrapper$Helper$Child.class"
@ cls.getClassLoader.getResource(resName)
res3: java.net.URL = memory:$sess/cmd0Wrapper$Helper$Child.class
@ cls.getClassLoader.getResourceAsStream(resName) != null
res4: Boolean = true
""")
}
}
}
| alexarchambault/ammonium | amm/src/test/scala/ammonite/session/AdvancedTests.scala | Scala | mit | 9,241 |
package gitbucket.core.plugin
import gitbucket.core.controller.Context
import gitbucket.core.service.RepositoryService.RepositoryInfo
/**
* The base trait of suggestion providers which supplies completion proposals in some text areas.
*/
trait SuggestionProvider {
/**
* The identifier of this suggestion provider.
* You must specify the unique identifier in the all suggestion providers.
*/
val id: String
/**
* The trigger of this suggestion provider. When user types this character, the proposal list would be displayed.
* Also this is used as the prefix of the replaced string.
*/
val prefix: String
/**
* The suffix of the replaced string. The default is `" "`.
*/
val suffix: String = " "
/**
* Which contexts is this suggestion provider enabled. Currently, available contexts are `"issues"` and `"wiki"`.
*/
val context: Seq[String]
/**
   * If this suggestion provider has a static proposal list, override this method to return it.
*
* The returned sequence is rendered as follows:
* <pre>
* [
* {
* "label" -> "value1",
* "value" -> "value1"
* },
* {
* "label" -> "value2",
* "value" -> "value2"
* },
* ]
* </pre>
*
* Each element can be accessed as `option` in `template()` or `replace()` method.
*/
def values(repository: RepositoryInfo): Seq[String] = Nil
/**
* If this suggestion provider has static proposal list, override this method to return it.
*
* If your proposals have label and value, use this method instead of `values()`.
* The first element of tuple is used as a value, and the second element is used as a label.
*
* The returned sequence is rendered as follows:
* <pre>
* [
* {
* "label" -> "label1",
* "value" -> "value1"
* },
* {
* "label" -> "label2",
* "value" -> "value2"
* },
* ]
* </pre>
*
* Each element can be accessed as `option` in `template()` or `replace()` method.
*/
def options(repository: RepositoryInfo): Seq[(String, String)] = values(repository).map { value => (value, value) }
/**
* JavaScript fragment to generate a label of completion proposal. The default is: `option.label`.
*/
def template(implicit context: Context): String = "option.label"
/**
* JavaScript fragment to generate a replaced value of completion proposal. The default is: `option.value`
*/
def replace(implicit context: Context): String = "option.value"
/**
   * If this suggestion provider needs some additional process to assemble the proposal list (e.g. it needs to use Ajax
* to get a proposal list from the server), then override this method and return any JavaScript code.
*/
def additionalScript(implicit context: Context): String = ""
}
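/**
 * Illustrative sketch (not part of GitBucket): a minimal provider backed by a static
 * proposal list. The id, prefix, context and values below are assumptions for the example.
 */
class PrioritySuggestionProvider extends SuggestionProvider {
  override val id: String = "priority"
  override val prefix: String = "!"
  override val context: Seq[String] = Seq("issues")
  // Each value is used as both the label and the inserted text.
  override def values(repository: RepositoryInfo): Seq[String] = Seq("high", "medium", "low")
}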
class UserNameSuggestionProvider extends SuggestionProvider {
override val id: String = "user"
override val prefix: String = "@"
override val context: Seq[String] = Seq("issues")
override def additionalScript(implicit context: Context): String =
s"""$$.get('${context.path}/_user/proposals', { query: '', user: true, group: false }, function (data) { user = data.options; });"""
}
| gencer/gitbucket | src/main/scala/gitbucket/core/plugin/SuggestionProvider.scala | Scala | apache-2.0 | 3,213 |
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.store.catalog
import java.util.ArrayDeque
import scala.collection.JavaConversions._
import com.nothome.delta.{Delta, GDiffPatcher}
import com.treode.async.{Async, Callback, Scheduler}
import com.treode.async.misc.materialize
import com.treode.disk.{Disk, PageDescriptor, Position, RecordDescriptor}
import com.treode.store.{Bytes, CatalogId, StaleException, TxClock}
import Async.{guard, when}
import Callback.ignore
import Handler.{Meta, pager}
private class Chronicle (
var version: Int,
var checksum: Int,
var bytes: Bytes,
var history: ArrayDeque [Bytes]
) {
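  // Computes the update to send to a peer that is at version `other`: an empty patch if
  // the peer is already up to date, a full Assign if the peer is too far behind the
  // retained history, and otherwise only the patches the peer is missing.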
def diff (other: Int): Update = {
val start = other - version + history.size
if (start >= history.size) {
Patch (version, checksum, Seq.empty)
} else if (start < 0) {
Assign (version, bytes, history.toSeq)
} else {
Patch (version, checksum, history.drop (start) .toSeq)
}
}
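  /** Build the patch that advances this catalog to `version` with the new `bytes`; only
    * the immediately following version is accepted, otherwise the request is stale.
    */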
def diff (version: Int, bytes: Bytes): Patch = {
if (version != this.version + 1)
throw new StaleException (TxClock.MinValue)
Patch (version, bytes.murmur32, Seq (Patch.diff (this.bytes, bytes)))
}
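  /** Apply an assignment received from a peer; it is ignored unless it is strictly newer
    * than the local version.
    */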
def patch (version: Int, bytes: Bytes, history: Seq [Bytes]): Option [Update] = {
if (version <= this.version)
return None
this.version = version
this.checksum = bytes.murmur32
this.bytes = bytes
this.history.clear()
this.history.addAll (history)
Some (Assign (version, bytes, history))
}
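  /** Apply a patch received from a peer; it is ignored unless it extends the local
    * version and the supplied patches cover the gap.
    */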
def patch (end: Int, checksum: Int, patches: Seq [Bytes]) : Option [Update] = {
val span = end - version
if (span <= 0 || patches.length < span)
return None
val future = patches drop (patches.length - span)
var bytes = this.bytes
for (patch <- future)
bytes = Patch.patch (bytes, patch)
assert (bytes.murmur32 == checksum, "Patch application went awry.")
this.version += span
this.checksum = checksum
this.bytes = bytes
for (_ <- 0 until history.size + span - catalogHistoryLimit)
history.remove()
history.addAll (future)
Some (Patch (version, checksum, future))
}
}
private object Chronicle {
def apply : Chronicle =
new Chronicle (0, Bytes.empty.murmur32, Bytes.empty, new ArrayDeque)
}
| Treode/store | store/src/com/treode/store/catalog/Chronicle.scala | Scala | apache-2.0 | 2,829 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.db
import java.sql.Connection
import javax.sql.DataSource
/**
* Database API.
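 *
 * A minimal usage sketch (illustrative only, not taken from the Play documentation):
 * given a `Database` instance and a hypothetical `users` table, the block-based helpers
 * below manage the connection lifecycle automatically.
 *
 * {{{
 * def countUsers(database: Database): Int =
 *   database.withConnection { connection =>
 *     val rs = connection.createStatement().executeQuery("select count(*) from users")
 *     rs.next()
 *     rs.getInt(1)
 *   }
 * }}}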
*/
trait Database {
/**
* The configuration name for this database.
*/
def name: String
/**
* The underlying JDBC data source for this database.
*/
def dataSource: DataSource
/**
   * The JDBC connection URL for this database, i.e. `jdbc:...`
* Normally retrieved via a connection.
*/
def url: String
/**
* Get a JDBC connection from the underlying data source.
* Autocommit is enabled by default.
*
* Don't forget to release the connection at some point by calling close().
*
* @return a JDBC connection
*/
def getConnection(): Connection
/**
* Get a JDBC connection from the underlying data source.
*
* Don't forget to release the connection at some point by calling close().
*
* @param autocommit determines whether to autocommit the connection
* @return a JDBC connection
*/
def getConnection(autocommit: Boolean): Connection
/**
* Execute a block of code, providing a JDBC connection.
* The connection and all created statements are automatically released.
*
* @param block code to execute
* @return the result of the code block
*/
def withConnection[A](block: Connection => A): A
/**
* Execute a block of code, providing a JDBC connection.
* The connection and all created statements are automatically released.
*
* @param autocommit determines whether to autocommit the connection
* @param block code to execute
* @return the result of the code block
*/
def withConnection[A](autocommit: Boolean)(block: Connection => A): A
/**
* Execute a block of code in the scope of a JDBC transaction.
* The connection and all created statements are automatically released.
* The transaction is automatically committed, unless an exception occurs.
*
* @param block code to execute
* @return the result of the code block
*/
def withTransaction[A](block: Connection => A): A
/**
* Shutdown this database, closing the underlying data source.
*/
def shutdown(): Unit
}
| wsargent/playframework | framework/src/play-jdbc-api/src/main/scala/play/api/db/Database.scala | Scala | apache-2.0 | 2,204 |
package objsets
import TweetReader._
/**
* A class to represent tweets.
*/
class Tweet(val user: String, val text: String, val retweets: Int) {
override def toString: String =
"User: " + user + "\n" +
"Text: " + text + " [" + retweets + "]"
}
/**
* This represents a set of objects of type `Tweet` in the form of a binary search
* tree. Every branch in the tree has two children (two `TweetSet`s). There is an
* invariant which always holds: for every branch `b`, all elements in the left
* subtree are smaller than the tweet at `b`. The elements in the right subtree are
* larger.
*
* Note that the above structure requires us to be able to compare two tweets (we
* need to be able to say which of two tweets is larger, or if they are equal). In
* this implementation, the equality / order of tweets is based on the tweet's text
* (see `def incl`). Hence, a `TweetSet` could not contain two tweets with the same
* text from different users.
*
*
* The advantage of representing sets as binary search trees is that the elements
* of the set can be found quickly. If you want to learn more you can take a look
* at the Wikipedia page [1], but this is not necessary in order to solve this
* assignment.
*
* [1] http://en.wikipedia.org/wiki/Binary_search_tree
*/
abstract class TweetSet {
/**
* This method takes a predicate and returns a subset of all the elements
* in the original set for which the predicate is true.
*
   * Question: Can we implement this method here, or should it remain abstract
* and be implemented in the subclasses?
*/
def filter(p: Tweet => Boolean): TweetSet = filterAcc(p, new Empty)
/**
   * This is a helper method for `filter` that propagates the accumulated tweets.
*/
def filterAcc(p: Tweet => Boolean, acc: TweetSet): TweetSet
/**
* Returns a new `TweetSet` that is the union of `TweetSet`s `this` and `that`.
*
   * Question: Should we implement this method here, or should it remain abstract
* and be implemented in the subclasses?
*/
def union(that: TweetSet): TweetSet
/**
* Returns the tweet from this set which has the greatest retweet count.
*
* Calling `mostRetweeted` on an empty set should throw an exception of
* type `java.util.NoSuchElementException`.
*
   * Question: Should we implement this method here, or should it remain abstract
* and be implemented in the subclasses?
*/
def mostRetweeted: Tweet
/**
* Returns a list containing all tweets of this set, sorted by retweet count
* in descending order. In other words, the head of the resulting list should
* have the highest retweet count.
*
* Hint: the method `remove` on TweetSet will be very useful.
   * Question: Should we implement this method here, or should it remain abstract
* and be implemented in the subclasses?
*/
def descendingByRetweet: TweetList = {
if(isEmpty) Nil
else {
val most = mostRetweeted
val list = remove(most).descendingByRetweet
new Cons(most, list)
}
}
/**
* The following methods are already implemented
*/
/**
   * Returns a new `TweetSet` which contains all elements of this set, and the
   * new element `tweet` in case it does not already exist in this set.
*
* If `this.contains(tweet)`, the current set is returned.
*/
def incl(tweet: Tweet): TweetSet
/**
* Returns a new `TweetSet` which excludes `tweet`.
*/
def remove(tweet: Tweet): TweetSet
/**
* Tests if `tweet` exists in this `TweetSet`.
*/
def contains(tweet: Tweet): Boolean
/**
* This method takes a function and applies it to every element in the set.
*/
def foreach(f: Tweet => Unit): Unit
def isEmpty: Boolean
}
class Empty extends TweetSet {
def filterAcc(p: Tweet => Boolean, acc: TweetSet): TweetSet = acc
/**
* The following methods are already implemented
*/
def contains(tweet: Tweet): Boolean = false
def incl(tweet: Tweet): TweetSet = new NonEmpty(tweet, new Empty, new Empty)
def remove(tweet: Tweet): TweetSet = this
def foreach(f: Tweet => Unit): Unit = ()
override def union(that: TweetSet): TweetSet = that
override def mostRetweeted = throw new NoSuchElementException
override def isEmpty = true
}
class NonEmpty(elem: Tweet, left: TweetSet, right: TweetSet) extends TweetSet {
def filterAcc(p: Tweet => Boolean, acc: TweetSet): TweetSet = {
val filtered = left.filter(p) union right.filter(p)
if(p(elem)) filtered union acc incl elem else filtered union acc
}
/**
* The following methods are already implemented
*/
def contains(x: Tweet): Boolean =
if (x.text < elem.text) left.contains(x)
else if (elem.text < x.text) right.contains(x)
else true
def incl(x: Tweet): TweetSet = {
if (x.text < elem.text) new NonEmpty(elem, left.incl(x), right)
else if (elem.text < x.text) new NonEmpty(elem, left, right.incl(x))
else this
}
def remove(tw: Tweet): TweetSet =
if (tw.text < elem.text) new NonEmpty(elem, left.remove(tw), right)
else if (elem.text < tw.text) new NonEmpty(elem, left, right.remove(tw))
else left.union(right)
def foreach(f: Tweet => Unit): Unit = {
f(elem)
left.foreach(f)
right.foreach(f)
}
override def union(that: TweetSet): TweetSet = left.union(right).union(that).incl(elem)
override def mostRetweeted: Tweet = {
val leftMax = if(!left.isEmpty && left.mostRetweeted.retweets > elem.retweets) left.mostRetweeted else elem
val rightMax = if(!right.isEmpty && right.mostRetweeted.retweets > elem.retweets) right.mostRetweeted else elem
if (leftMax.retweets > rightMax.retweets) leftMax
else rightMax
}
override def isEmpty = false
}
trait TweetList {
def head: Tweet
def tail: TweetList
def isEmpty: Boolean
def foreach(f: Tweet => Unit): Unit =
if (!isEmpty) {
f(head)
tail.foreach(f)
}
}
object Nil extends TweetList {
def head = throw new java.util.NoSuchElementException("head of EmptyList")
def tail = throw new java.util.NoSuchElementException("tail of EmptyList")
def isEmpty = true
}
class Cons(val head: Tweet, val tail: TweetList) extends TweetList {
def isEmpty = false
}
object GoogleVsApple {
val google = List("android", "Android", "galaxy", "Galaxy", "nexus", "Nexus")
val apple = List("ios", "iOS", "iphone", "iPhone", "ipad", "iPad")
lazy val googleTweets: TweetSet = TweetReader.allTweets.filter(t => google.exists(x => t.text.contains(x)))
lazy val appleTweets: TweetSet = TweetReader.allTweets.filter(t => apple.exists(x => t.text.contains(x)))
/**
* A list of all tweets mentioning a keyword from either apple or google,
* sorted by the number of retweets.
*/
lazy val trending: TweetList = (googleTweets union appleTweets).descendingByRetweet
}
object Main extends App {
// Print the trending tweets
GoogleVsApple.trending foreach println
}
| jan-kelemen/oc-coursera-progfun1 | w3/objsets/src/main/scala/objsets/TweetSet.scala | Scala | mit | 6,964 |
package dotty.tools.dotc
package transform
import core._
import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._
import MegaPhase._
import SymUtils._
import ast.Trees._
import dotty.tools.dotc.reporting.messages.TypeMismatch
import dotty.tools.dotc.util.Spans.Span
/** Expand SAM closures that cannot be represented by the JVM as lambdas to anonymous classes.
* These fall into five categories
*
* 1. Partial function closures, we need to generate isDefinedAt and applyOrElse methods for these.
* 2. Closures implementing non-trait classes
 *  3. Closures implementing classes that inherit from a class other than Object
 *     (a lambda cannot be a run-time subtype of such a class)
* 4. Closures that implement traits which run initialization code.
* 5. Closures that get synthesized abstract methods in the transformation pipeline. These methods can be
* (1) superaccessors, (2) outer references, (3) accessors for fields.
*
* However, implicit function types do not count as SAM types.
*/
class ExpandSAMs extends MiniPhase {
override def phaseName: String = "expandSAMs"
import ast.tpd._
/** Is the SAMType `cls` also a SAM under the rules of the platform? */
def isPlatformSam(cls: ClassSymbol)(implicit ctx: Context): Boolean =
ctx.platform.isSam(cls)
override def transformBlock(tree: Block)(implicit ctx: Context): Tree = tree match {
case Block(stats @ (fn: DefDef) :: Nil, Closure(_, fnRef, tpt)) if fnRef.symbol == fn.symbol =>
tpt.tpe match {
case NoType =>
tree // it's a plain function
case tpe if defn.isContextFunctionType(tpe) =>
tree
case tpe @ SAMType(_) if tpe.isRef(defn.PartialFunctionClass) =>
val tpe1 = checkRefinements(tpe, fn)
toPartialFunction(tree, tpe1)
case tpe @ SAMType(_) if isPlatformSam(tpe.classSymbol.asClass) =>
checkRefinements(tpe, fn)
tree
case tpe =>
val tpe1 = checkRefinements(tpe, fn)
val Seq(samDenot) = tpe1.possibleSamMethods
cpy.Block(tree)(stats,
AnonClass(tpe1 :: Nil, fn.symbol.asTerm :: Nil, samDenot.symbol.asTerm.name :: Nil))
}
case _ =>
tree
}
/** A partial function literal:
*
* ```
* val x: PartialFunction[A, B] = { case C1 => E1; ...; case Cn => En }
* ```
*
* which desugars to:
*
* ```
* val x: PartialFunction[A, B] = {
* def $anonfun(x: A): B = x match { case C1 => E1; ...; case Cn => En }
* closure($anonfun: PartialFunction[A, B])
* }
* ```
*
   * is expanded to an anonymous class:
*
* ```
* val x: PartialFunction[A, B] = {
* class $anon extends AbstractPartialFunction[A, B] {
* final def isDefinedAt(x: A): Boolean = x match {
* case C1 => true
* ...
* case Cn => true
* case _ => false
* }
*
* final def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = x match {
* case C1 => E1
* ...
* case Cn => En
* case _ => default(x)
* }
* }
*
* new $anon
* }
* ```
*/
private def toPartialFunction(tree: Block, tpe: Type)(implicit ctx: Context): Tree = {
/** An extractor for match, either contained in a block or standalone. */
object PartialFunctionRHS {
def unapply(tree: Tree): Option[Match] = tree match {
case Block(Nil, expr) => unapply(expr)
case m: Match => Some(m)
case _ => None
}
}
val closureDef(anon @ DefDef(_, _, List(List(param)), _, _)) = tree
anon.rhs match {
case PartialFunctionRHS(pf) =>
val anonSym = anon.symbol
val anonTpe = anon.tpe.widen
val parents = List(
defn.AbstractPartialFunctionClass.typeRef.appliedTo(anonTpe.firstParamTypes.head, anonTpe.resultType),
defn.SerializableType)
val pfSym = ctx.newNormalizedClassSymbol(anonSym.owner, tpnme.ANON_CLASS, Synthetic | Final, parents, coord = tree.span)
def overrideSym(sym: Symbol) = sym.copy(
owner = pfSym,
flags = Synthetic | Method | Final | Override,
info = tpe.memberInfo(sym),
coord = tree.span).asTerm.entered
val isDefinedAtFn = overrideSym(defn.PartialFunction_isDefinedAt)
val applyOrElseFn = overrideSym(defn.PartialFunction_applyOrElse)
def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(implicit ctx: Context) = {
val selector = tree.selector
val selectorTpe = selector.tpe.widen
val defaultSym = ctx.newSymbol(pfParam.owner, nme.WILDCARD, Synthetic | Case, selectorTpe)
val defaultCase =
CaseDef(
Bind(defaultSym, Underscore(selectorTpe)),
EmptyTree,
defaultValue)
val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef)))
cpy.Match(tree)(unchecked, cases :+ defaultCase)
.subst(param.symbol :: Nil, pfParam :: Nil)
// Needed because a partial function can be written as:
// param => param match { case "foo" if foo(param) => param }
// And we need to update all references to 'param'
}
def isDefinedAtRhs(paramRefss: List[List[Tree]])(implicit ctx: Context) = {
val tru = Literal(Constant(true))
def translateCase(cdef: CaseDef) =
cpy.CaseDef(cdef)(body = tru).changeOwner(anonSym, isDefinedAtFn)
val paramRef = paramRefss.head.head
val defaultValue = Literal(Constant(false))
translateMatch(pf, paramRef.symbol, pf.cases.map(translateCase), defaultValue)
}
def applyOrElseRhs(paramRefss: List[List[Tree]])(implicit ctx: Context) = {
val List(paramRef, defaultRef) = paramRefss.head
def translateCase(cdef: CaseDef) =
cdef.changeOwner(anonSym, applyOrElseFn)
val defaultValue = defaultRef.select(nme.apply).appliedTo(paramRef)
translateMatch(pf, paramRef.symbol, pf.cases.map(translateCase), defaultValue)
}
val constr = ctx.newConstructor(pfSym, Synthetic, Nil, Nil).entered
val isDefinedAtDef = transformFollowingDeep(DefDef(isDefinedAtFn, isDefinedAtRhs(_)(ctx.withOwner(isDefinedAtFn))))
val applyOrElseDef = transformFollowingDeep(DefDef(applyOrElseFn, applyOrElseRhs(_)(ctx.withOwner(applyOrElseFn))))
val pfDef = ClassDef(pfSym, DefDef(constr), List(isDefinedAtDef, applyOrElseDef))
cpy.Block(tree)(pfDef :: Nil, New(pfSym.typeRef, Nil))
case _ =>
val found = tpe.baseType(defn.FunctionClass(1))
ctx.error(TypeMismatch(found, tpe), tree.sourcePos)
tree
}
}
private def checkRefinements(tpe: Type, tree: Tree)(implicit ctx: Context): Type = tpe.dealias match {
case RefinedType(parent, name, _) =>
if (name.isTermName && tpe.member(name).symbol.ownersIterator.isEmpty) // if member defined in the refinement
ctx.error("Lambda does not define " + name, tree.sourcePos)
checkRefinements(parent, tree)
case tpe =>
tpe
}
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala | Scala | apache-2.0 | 7,261 |
package is.hail.types.physical
import is.hail.annotations.Region
import is.hail.asm4s._
import is.hail.types.virtual.TLocus
import is.hail.variant._
abstract class PLocus extends PType {
def rgBc: BroadcastRG
lazy val virtualType: TLocus = TLocus(rgBc)
def rg: ReferenceGenome
def contig(value: Long): String
def contigType: PString
def position(value: Code[Long]): Code[Int]
def position(value: Long): Int
def positionType: PInt32
def unstagedStoreLocus(addr: Long, contig: String, position: Int, region: Region): Unit
} | hail-is/hail | hail/src/main/scala/is/hail/types/physical/PLocus.scala | Scala | mit | 549 |
package edu.berkeley.nlp.summ.data
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import edu.berkeley.nlp.futile.tokenizer.PTBLineLexer
case class DiscourseTree(val name: String,
val rootNode: DiscourseNode) extends Serializable {
// Set parent of each node
DiscourseTree.setParentsHelper(rootNode)
// Set head of each node
// This is according to the method described in Hirao et al. (2013),
// but it doesn't actually lead to sensible heads.
val leaves = DiscourseTree.getLeaves(rootNode)
def numLeaves = leaves.size
def leafWords = leaves.map(_.leafWords.toSeq)
val leafStatuses = leaves.map(_.label)
private def setHiraoHeadsHelper(node: DiscourseNode) {
// TODO: Actually percolate heads, only break ties if two Ns
var leftmostN = -1
for (idx <- node.span._1 until node.span._2) {
if (leftmostN == -1 && leafStatuses(idx) == "Nucleus") {
leftmostN = idx;
}
}
node.hiraoHead = if (leftmostN == -1) {
node.span._1
} else {
leftmostN
}
for (child <- node.children) {
setHiraoHeadsHelper(child)
}
}
setHiraoHeadsHelper(rootNode)
DiscourseTree.setRecursiveHeadsHelper(rootNode)
// Determine dependency structure
// This is the method specified in Hirao et al. but it
// doesn't seem to do well; you tend to end up with pretty
// shallow structures and they look a bit weird overall.
val hiraoParents = Array.tabulate(leafStatuses.size)(i => {
val leaf = leaves(i)
if (leafStatuses(i) == DiscourseNode.Nucleus) {
// Find the nearest dominating S, then assign to
// the head that S's parent. This is because only
// S's are in subordinating relations so we need to find
// one in order to establish the hierarchy.
var node = leaf.parent
while (node != null && node.label != DiscourseNode.Satellite) {
node = node.parent
}
if (node == null) {
-1
} else {
node.parent.head
}
} else {
require(leafStatuses(i) == DiscourseNode.Satellite)
// Find the first parent with a head that's not this
var node = leaf.parent
while (node.head == leaf.span._1) {
node = node.parent
}
node.head
}
})
// "Best" parent method, where your depth ends up being the number of
// Ss between you and the root + 1
private val advancedDepTree = DiscourseTree.setAdvancedParents(rootNode, leafStatuses.size, false)
private val advancedParents = advancedDepTree.parents
private val advancedLabels = advancedDepTree.labels.map(DiscourseTree.getCoarseLabel(_))
private val apRootIndices = advancedParents.zipWithIndex.filter(_._1 == -1).map(_._2)
private val advancedParentsOneRoot = Array.tabulate(advancedParents.size)(i => if (apRootIndices.contains(i) && apRootIndices(0) != i) apRootIndices(0) else advancedParents(i))
private val advancedDepTreeMNLinks = DiscourseTree.setAdvancedParents(rootNode, leafStatuses.size, true)
private val advancedParentsMNLinks = advancedDepTreeMNLinks.parents
private val advancedLabelsMNLinks = advancedDepTreeMNLinks.labels.map(DiscourseTree.getCoarseLabel(_))
val parents = advancedParentsOneRoot // Current best
val parentsMultiRoot = advancedParents
val labels = advancedLabels
val childrenMap = DiscourseTree.makeChildrenMap(parents)
def getParents(useMultinuclearLinks: Boolean) = {
if (useMultinuclearLinks) advancedDepTreeMNLinks.parents else advancedParentsOneRoot
}
def getLabels(useMultinuclearLinks: Boolean) = {
if (useMultinuclearLinks) advancedLabelsMNLinks else advancedLabels
}
}
object DiscourseTree {
def getCoarseLabel(label: String) = {
if (label == null) {
"root"
} else if (label.contains("-")) {
label.substring(0, label.indexOf("-"))
} else {
label
}
}
def getLeaves(rootNode: DiscourseNode): ArrayBuffer[DiscourseNode] = {
val leaves = new ArrayBuffer[DiscourseNode]
getLeavesHelper(rootNode, leaves)
}
def getLeavesHelper(rootNode: DiscourseNode, leaves: ArrayBuffer[DiscourseNode]): ArrayBuffer[DiscourseNode] = {
if (rootNode.isLeaf) {
leaves += rootNode
leaves
} else {
for (child <- rootNode.children) {
getLeavesHelper(child, leaves)
}
leaves
}
}
def setParentsHelper(node: DiscourseNode) {
for (child <- node.children) {
child.parent = node
setParentsHelper(child)
}
}
def setRecursiveHeadsHelper(node: DiscourseNode): Int = {
if (node.isLeaf) {
node.head = node.span._1
node.head
} else {
var parentHeadIdx = -1
for (child <- node.children) {
val childHead = setRecursiveHeadsHelper(child)
if (parentHeadIdx == -1 && child.label == DiscourseNode.Nucleus) {
parentHeadIdx = childHead
}
}
require(parentHeadIdx != -1)
node.head = parentHeadIdx
parentHeadIdx
}
}
def setAdvancedParents(node: DiscourseNode, numLeaves: Int, addMultinuclearLinks: Boolean): DiscourseDependencyTree = {
val depTree = new DiscourseDependencyTree(Array.fill(numLeaves)(-1), new Array[String](numLeaves), new ArrayBuffer[(Int,Int)])
setAdvancedParentsHelper(node, depTree, addMultinuclearLinks)
depTree
}
/**
* Set parents according to the "advanced" strategy, which by definition
* produces a tree such that the depth of each node is 1 + the number of Ss
* between it and the root. This helper method returns the set of unbound
* nodes at this point in the recursion; ordinarily in parsing this would just
* be one head, but it can be multiple in the case of N => N N rules.
*/
def setAdvancedParentsHelper(node: DiscourseNode, depTree: DiscourseDependencyTree, addMultinuclearLinks: Boolean): Seq[Int] = {
// Leaf node
if (node.children.size == 0) {
Seq(node.span._1)
} else if (node.children.size == 2) {
////////////
// BINARY //
////////////
// Identify the satellite (if it exists) and link up all exposed heads from
// the satellite to the nucleus. The rel2par of the satellite encodes the relation.
val leftExposed = setAdvancedParentsHelper(node.children(0), depTree, addMultinuclearLinks)
val rightExposed = setAdvancedParentsHelper(node.children(1), depTree, addMultinuclearLinks)
val ruleType = node.children(0).label + " " + node.children(1).label
// BINUCLEAR
if (ruleType == DiscourseNode.Nucleus + " " + DiscourseNode.Nucleus) {
if (addMultinuclearLinks) {
require(leftExposed.size == 1 && rightExposed.size == 1, "Bad structure!")
depTree.parents(rightExposed(0)) = leftExposed.head
// All labels of multinuclear things start with =
depTree.labels(rightExposed(0)) = "=" + node.children(1).rel2par
leftExposed
} else {
if (node.children(0).rel2par == "Same-Unit" && node.children(1).rel2par == "Same-Unit") {
// There can be multiple if one Same-Unit contains some coordination
for (leftIdx <- leftExposed) {
for (rightIdx <- rightExposed) {
depTree.sameUnitPairs += leftIdx -> rightIdx
}
}
}
leftExposed ++ rightExposed
}
} else if (ruleType == DiscourseNode.Nucleus + " " + DiscourseNode.Satellite) {
// Mononuclear, left-headed
val head = leftExposed.head
// val head = leftExposed.last // This works a bit worse
for (rightIdx <- rightExposed) {
depTree.parents(rightIdx) = head
depTree.labels(rightIdx) = node.children(1).rel2par
}
leftExposed
} else {
// Mononuclear, right-headed
require(ruleType == DiscourseNode.Satellite + " " + DiscourseNode.Nucleus)
val head = rightExposed.head
for (leftIdx <- leftExposed) {
depTree.parents(leftIdx) = head
depTree.labels(leftIdx) = node.children(0).rel2par
}
rightExposed
}
} else {
//////////////////
// HIGHER ARITY //
//////////////////
val allChildrenAreNuclei = !node.children.map(_.label == DiscourseNode.Satellite).reduce(_ || _)
val oneChildIsNucleus = node.children.map(_.label).filter(_ == DiscourseNode.Nucleus).size == 1
require(allChildrenAreNuclei || oneChildIsNucleus, "Bad higher-arity: " + node.children.map(_.label).toSeq)
// Higher-arity, all nuclei. Can be Same-Unit, mostly List
if (allChildrenAreNuclei) {
val allChildrenExposedIndices = node.children.map(child => setAdvancedParentsHelper(child, depTree, addMultinuclearLinks))
// Link up all pairs of exposed indices across the children
val allExposed = new ArrayBuffer[Int]
if (addMultinuclearLinks) {
// Add links in sequence a <- b <- c ... (child points to parent here)
// There should only be one exposed index in this case
for (childIdx <- 0 until allChildrenExposedIndices.size) {
require(allChildrenExposedIndices(childIdx).size == 1)
if (childIdx > 0) {
depTree.parents(allChildrenExposedIndices(childIdx).head) = allChildrenExposedIndices(childIdx - 1).head
// All labels of multinuclear things start with =
depTree.labels(allChildrenExposedIndices(childIdx).head) = "=" + node.children(childIdx).rel2par
}
}
allExposed += allChildrenExposedIndices(0).head
} else {
// Pass all children up
for (exposedIndices <- allChildrenExposedIndices) {
allExposed ++= exposedIndices
}
}
allExposed
} else {
// Higher-arity, one nucleus. Typically standard relations that simply have arity > 2
val nucleusIdx = node.children.map(_.label).zipWithIndex.filter(_._1 == DiscourseNode.Nucleus).head._2
val nucleusExposed = setAdvancedParentsHelper(node.children(nucleusIdx), depTree, addMultinuclearLinks)
for (i <- 0 until node.children.size) {
if (i != nucleusIdx) {
val satelliteExposed = setAdvancedParentsHelper(node.children(i), depTree, addMultinuclearLinks)
// val nucleusHead = if (i < nucleusIdx) nucleusExposed.head else nucleusExposed.last // This works a bit worse
val nucleusHead = nucleusExposed.head
for (satelliteIdx <- satelliteExposed) {
depTree.parents(satelliteIdx) = nucleusHead
depTree.labels(satelliteIdx) = node.children(i).rel2par
}
}
}
nucleusExposed
}
}
}
def makeChildrenMap(parents: Seq[Int]) = {
val childrenMap = new HashMap[Int,ArrayBuffer[Int]]
for (i <- 0 until parents.size) {
childrenMap.put(i, new ArrayBuffer[Int])
}
for (i <- 0 until parents.size) {
if (parents(i) != -1) {
childrenMap(parents(i)) += i
}
}
childrenMap
}
def computeDepths(parents: Seq[Int]): Array[Int] = computeDepths(parents, Array.fill(parents.size)(""), false)
def computeDepths(parents: Seq[Int], labels: Seq[String], flattenMultinuclear: Boolean): Array[Int] = {
val depths = Array.tabulate(parents.size)(i => -1)
var unassignedDepths = true
while (unassignedDepths) {
unassignedDepths = false
for (i <- 0 until parents.size) {
if (depths(i) == -1) {
if (parents(i) == -1) {
depths(i) = 1
} else if (depths(parents(i)) != -1) {
depths(i) = if (flattenMultinuclear && labels(i).startsWith("=")) depths(parents(i)) else depths(parents(i)) + 1
} else {
unassignedDepths = true
}
}
}
}
// for (i <- 0 until depths.size) {
// require(depths(i) == computeDepth(parents, labels, flattenMultinuclear, i))
// }
depths
}
def computeDepth(parents: Seq[Int], labels: Seq[String], flattenMultinuclear: Boolean, idx: Int) = {
var node = idx
var depth = 0
// The root of the tree is at depth 1
while (node != -1) {
if (!flattenMultinuclear || !labels(node).startsWith("=")) {
depth += 1
}
node = parents(node)
}
depth
}
def computeNumDominated(parents: Seq[Int], idx: Int) = {
val childrenMap = makeChildrenMap(parents)
val children = childrenMap(idx)
var totalChildren = 0
var newFrontier = new HashSet[Int] ++ children
var frontier = new HashSet[Int]
while (!newFrontier.isEmpty) {
frontier = newFrontier
newFrontier = new HashSet[Int]
for (child <- frontier) {
totalChildren += 1
newFrontier ++= childrenMap(child)
}
}
totalChildren
}
}
case class DiscourseDependencyTree(val parents: Array[Int],
val labels: Array[String],
val sameUnitPairs: ArrayBuffer[(Int,Int)]) {
}
case class DiscourseNode(val label: String,
val rel2par: String,
val span: (Int,Int),
val leafText: String,
val children: ArrayBuffer[DiscourseNode]) extends Serializable {
var head: Int = -1
var hiraoHead: Int = -1
var parent: DiscourseNode = null
// N.B. If anything changes here, should rerun EDUAligner and make sure things aren't worse
val leafTextPreTok = leafText.replace("<P>", "")
  val leafWordsWhitespace = leafTextPreTok.split("\\s+").filter(_ != "<P>")
// Adding the period fixes a bug where "buy-outs" is treated differently sentence-internally than it is
// when it ends an utterance; generally this makes the tokenizer more consistent on fragments
  val leafWordsPTBLL = if (leafTextPreTok.split("\\s+").last.contains("-")) {
new PTBLineLexer().tokenize(leafTextPreTok + " .").toArray(Array[String]()).dropRight(1)
} else {
new PTBLineLexer().tokenize(leafTextPreTok).toArray(Array[String]())
}
// There are still some spaces in some tokens; get rid of these
val leafWordsPTB = if (leafTextPreTok != "") {
    leafWordsPTBLL.flatMap(_.split("\\s+")).filter(_ != "<P>")
} else {
Array[String]()
}
// def leafWords = leafWordsWhitespace
def leafWords = leafWordsPTB
// def leafWords = leafWordsPTBLL
def isLeaf = span._2 - span._1 == 1
}
object DiscourseNode {
val Nucleus = "Nucleus"
val Satellite = "Satellite"
val Root = "Root"
}
| gregdurrett/berkeley-doc-summarizer | src/main/scala/edu/berkeley/nlp/summ/data/DiscourseTree.scala | Scala | gpl-3.0 | 14,624 |
/******************************************************************************
Copyright (c) 2012-2013, KAIST, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.bug_detector
import kr.ac.kaist.jsaf.analysis.cfg._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.nodes.ASTNode
import kr.ac.kaist.jsaf.nodes_util.{NodeUtil => NU, JSAstToConcrete}
import kr.ac.kaist.jsaf.widl.WIDLChecker
object BugHelper {
////////////////////////////////////////////////////////////////
// Get function argument size
////////////////////////////////////////////////////////////////
def getBuiltinArgumentSize(funcName: String): (Int, Int) = {
// Built-in function
argSizeMap.get(funcName) match {
case Some(as) => return as
case None =>
}
// WIDL function
WIDLChecker.argSizeMap.get(funcName) match {
case Some(as) => return as
case None =>
}
// println("* Unknown argument size of \\"" + funcName + "\\".")
(-1, -1)
}
////////////////////////////////////////////////////////////////
// Get function name
////////////////////////////////////////////////////////////////
def getFuncName(funcName: String, varManager: VarManager = null, expr: CFGNode = null): String = {
if (funcName.startsWith("<>arguments<>")) return "arguments"
if (!NU.isFunExprName(funcName)) return funcName
if (varManager != null && expr != null) {
expr match {
case expr: CFGExpr =>
val bugVar0 = varManager.getUserVarAssign(expr)
if (bugVar0 != null) return bugVar0.toString
case expr: CFGFunExpr =>
var isFirst = true
val funcName = new StringBuilder
for (rhs <- varManager.getUserVarAssignR(expr.lhs)) {
if (isFirst) isFirst = false else funcName.append(", ")
funcName.append(rhs.toString)
}
if (funcName.length > 0) return funcName.toString
case _ =>
}
}
"anonymous_function"
}
////////////////////////////////////////////////////////////////
// Get [[Function]] or [[Construct]] property
////////////////////////////////////////////////////////////////
def getFuncOrConstPropName(heap: Heap, funLoc: Loc, isCall: Boolean): String = {
// Function must have [[Function]] or [[Construct]] property
if (isCall) {
if (BoolTrue <= Helper.IsCallable(heap, funLoc)) return "@function"
}
else {
if (BoolTrue <= Helper.HasConstruct(heap, funLoc)) return "@construct"
}
null
}
////////////////////////////////////////////////////////////////
// Get omitted code from a AST node
////////////////////////////////////////////////////////////////
def getOmittedCode(ast: ASTNode, maxLength: Int): (String, Boolean) = getOmittedCode(JSAstToConcrete.doit(ast), maxLength)
def getOmittedCode(code: String, maxLength: Int): (String, Boolean) = {
var newCode = ""
var isFirst = true
    for (line <- code.split('\n')) {
      val trimedLine = line.replace('\t', ' ').trim
if (newCode.length < maxLength && trimedLine.length > 0) {
if (isFirst) isFirst = false else newCode+= ' '
newCode+= trimedLine
}
}
if (newCode.length > maxLength) (newCode.substring(0, maxLength), true)
else (newCode, false)
}
////////////////////////////////////////////////////////////////
// Convert property name from AbsString
////////////////////////////////////////////////////////////////
def getPropName(name: AbsString): String =
AbsString.concretize(name) match {
case Some(propName) => propName
case _ => "unknown_property"
}
////////////////////////////////////////////////////////////////
// IsCallable for locations
////////////////////////////////////////////////////////////////
def isCallable(heap: Heap, locSet: LocSet): AbsBool = {
if(locSet.size == 0) return BoolFalse
var isCallable: AbsBool = BoolBot
for(loc <- locSet) isCallable+= Helper.IsCallable(heap, loc)
isCallable
}
////////////////////////////////////////////////////////////////
// Get a set of property names (String) from an AbsString
////////////////////////////////////////////////////////////////
def props(heap: Heap, loc: Loc, absString: AbsString): Set[String] = {
if (!heap.domIn(loc)) Set()
else {
absString match {
// ignore @default
case StrTop => heap(loc).map.keySet.filter(s => !s.take(1).equals("@"))
case NumStr => heap(loc).map.keySet.filter(s => !s.take(1).equals("@") && AbsString.alpha(s) <= NumStr)
case OtherStr => heap(loc).map.keySet.filter(s => !s.take(1).equals("@") && AbsString.alpha(s) <= OtherStr)
case NumStrSingle(s) => Set(s)
case OtherStrSingle(s) => Set(s)
case StrBot => Set()
}
}
}
////////////////////////////////////////////////////////////////
// PValue to String
////////////////////////////////////////////////////////////////
def pvalueToString(pvalue: PValue, concreteOnly: Boolean = true): String = {
var result = ""
pvalue.foreach(absValue => {
if (!absValue.isBottom && (!concreteOnly || absValue.isConcrete)) {
if (result.length == 0) result+= absValue.toString
else result+= ", " + absValue.toString
}
})
result
}
////////////////////////////////////////////////////////////////
// 9.6 ToUint32: (Unsigned 32 Bit Integer)
////////////////////////////////////////////////////////////////
def toUint32(n: Double): Long = {
def modulo(x: Double, y: Long): Long = {
val result = math.abs(x.toLong) % math.abs(y)
if(math.signum(x) < 0) return math.signum(y) * (math.abs(y) - result)
math.signum(y) * result
}
// 1. Let number be the result of calling ToNumber on the input argument.
// 2. If number is NaN, +0, -0, +INF or -INF, return +0.
if(n.isNaN || n == 0 || n.isInfinite) return 0
// 3. Let posInt be sign(number) * floor(abs(number))
val posInt = math.signum(n) * math.floor(math.abs(n))
// 4. Let int32bit be posInt modulo 2^32; that is, ...
val int32bit = modulo(posInt, 0x100000000L)
// 5. Return int32bit.
int32bit.toLong
}
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/bug_detector/BugHelper.scala | Scala | bsd-3-clause | 6,484 |
package mesosphere.marathon
import com.github.fge.jackson.JsonLoader
import com.github.fge.jsonschema.core.report.ProcessingReport
import com.github.fge.jsonschema.main.JsonSchemaFactory
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.api.JsonTestHelper
import mesosphere.marathon.api.v2.json.V2AppDefinition
import mesosphere.marathon.state.{ Timestamp, AppDefinition }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.tasks.MarathonTasks
import mesosphere.mesos.protos._
import org.apache.mesos.Protos.{ CommandInfo, TaskID, TaskInfo, Offer }
import play.api.libs.json.Json
trait MarathonTestHelper {
import mesosphere.mesos.protos.Implicits._
def makeConfig(args: String*): AllConf = {
val opts = new AllConf(args) {
// scallop will trigger sys exit
override protected def onError(e: Throwable): Unit = throw e
}
opts.afterInit()
opts
}
def defaultConfig(
maxTasksPerOffer: Int = 1,
minReviveOffersInterval: Long = 100,
mesosRole: Option[String] = None,
acceptedResourceRoles: Option[Set[String]] = None,
envVarsPrefix: Option[String] = None): AllConf = {
var args = Seq(
"--master", "127.0.0.1:5050",
"--max_tasks_per_offer", maxTasksPerOffer.toString,
"--min_revive_offers_interval", minReviveOffersInterval.toString
)
mesosRole.foreach(args ++= Seq("--mesos_role", _))
acceptedResourceRoles.foreach(v => args ++= Seq("--default_accepted_resource_roles", v.mkString(",")))
    envVarsPrefix.foreach(args ++= Seq("--env_vars_prefix", _))
makeConfig(args: _*)
}
def makeBasicOffer(cpus: Double = 4.0, mem: Double = 16000, disk: Double = 1.0,
beginPort: Int = 31000, endPort: Int = 32000, role: String = "*"): Offer.Builder = {
val cpusResource = ScalarResource(Resource.CPUS, cpus, role = role)
val memResource = ScalarResource(Resource.MEM, mem, role = role)
val diskResource = ScalarResource(Resource.DISK, disk, role = role)
val portsResource = if (beginPort <= endPort) {
Some(RangesResource(
Resource.PORTS,
Seq(Range(beginPort.toLong, endPort.toLong)),
role
))
}
else {
None
}
val offerBuilder = Offer.newBuilder
.setId(OfferID("1"))
.setFrameworkId(FrameworkID("marathon"))
.setSlaveId(SlaveID("slave0"))
.setHostname("localhost")
.addResources(cpusResource)
.addResources(memResource)
.addResources(diskResource)
portsResource.foreach(offerBuilder.addResources(_))
offerBuilder
}
def makeBasicOfferWithRole(cpus: Double, mem: Double, disk: Double,
beginPort: Int, endPort: Int, role: String) = {
val portsResource = RangesResource(
Resource.PORTS,
Seq(Range(beginPort.toLong, endPort.toLong)),
role
)
val cpusResource = ScalarResource(Resource.CPUS, cpus, role)
val memResource = ScalarResource(Resource.MEM, mem, role)
val diskResource = ScalarResource(Resource.DISK, disk, role)
Offer.newBuilder
.setId(OfferID("1"))
.setFrameworkId(FrameworkID("marathon"))
.setSlaveId(SlaveID("slave0"))
.setHostname("localhost")
.addResources(cpusResource)
.addResources(memResource)
.addResources(diskResource)
.addResources(portsResource)
}
def makeOneCPUTask(taskId: String): TaskInfo.Builder = {
TaskInfo.newBuilder()
.setName("true")
.setTaskId(TaskID.newBuilder().setValue(taskId).build())
.setSlaveId(SlaveID("slave1"))
.setCommand(CommandInfo.newBuilder().setShell(true).addArguments("true"))
.addResources(ScalarResource(Resource.CPUS, 1.0, "*"))
}
def makeTaskFromTaskInfo(taskInfo: TaskInfo,
offer: Offer = makeBasicOffer().build(),
version: Timestamp = Timestamp(10), now: Timestamp = Timestamp(10)): MarathonTask =
{
import scala.collection.JavaConverters._
MarathonTasks.makeTask(
id = taskInfo.getTaskId.getValue,
host = offer.getHostname,
ports = Seq(1, 2, 3), // doesn't matter here
attributes = offer.getAttributesList.asScala,
version = version,
now = now,
slaveId = offer.getSlaveId
)
}
def makeBasicApp() = AppDefinition(
id = "test-app".toPath,
cpus = 1,
mem = 64,
disk = 1,
executor = "//cmd"
)
lazy val appSchema = {
val appJson = "/mesosphere/marathon/api/v2/AppDefinition.json"
val appDefinition = JsonLoader.fromResource(appJson)
val factory = JsonSchemaFactory.byDefault()
factory.getJsonSchema(appDefinition)
}
def validateJsonSchema(app: V2AppDefinition, valid: Boolean = true) {
import mesosphere.marathon.api.v2.json.Formats._
// TODO: Revalidate the decision to disallow null values in schema
// Possible resolution: Do not render null values in our formats by default anymore.
val appStr = Json.prettyPrint(JsonTestHelper.removeNullFieldValues(Json.toJson(app)))
validateJsonSchemaForString(appStr, valid)
}
def validateJsonSchemaForString(appStr: String, valid: Boolean): Unit = {
val appJson = JsonLoader.fromString(appStr)
val validationResult: ProcessingReport = appSchema.validate(appJson)
lazy val pretty = Json.prettyPrint(Json.parse(appStr))
    assert(validationResult.isSuccess == valid, s"validation errors $validationResult for json:\n$pretty")
}
}
object MarathonTestHelper extends MarathonTestHelper
| sledigabel/marathon | src/test/scala/mesosphere/marathon/MarathonTestHelper.scala | Scala | apache-2.0 | 5,539 |
// // Fitness: 12
// // ---
// // (0,0) 1,0 2,0 3,0
// // 0,1 3,1
// // 0,2 3,2
// // 0,3 1,3 2,3 3,3
//
// package com.routably.beessolver.vrp.data
//
// import com.routably.beessolver.vrp.Location
// import com.routably.beessolver.vrp.Job
//
// object Simple extends Problem {
//
// def maxVehicles = 1
// def maxCapacity = 100
// def maxRouteTime = 999999
// def depot = new Location(0, 0)
//
// def jobs = ImportUtil.toJobs(List(
// (2, 1, 0, 0),
// (3, 2, 0, 0),
// (4, 3, 0, 0),
// (5, 3, 1, 0),
// (6, 3, 2, 0),
// (7, 3, 3, 0),
// (8, 2, 3, 0),
// (9, 1, 3, 0),
// (10, 0, 3, 0),
// (11, 0, 2, 0),
// (12, 0, 1, 0)
// ))
//
// def solution = 12
//
// def solutionFull = ImportUtil.toSolution(this, List(
// List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)
// ))
//
//
// }
| aishfenton/bees_solver | src/main/scala/com/routably/beessolver/vrp/data/Simple.scala | Scala | mit | 903 |
package com.github.sstone.amqp.samples
import akka.actor.{Props, Actor, ActorSystem}
import com.github.sstone.amqp.{ChannelOwner, ConnectionOwner, Amqp}
import com.github.sstone.amqp.Amqp._
import com.rabbitmq.client.ConnectionFactory
import scala.concurrent.duration._
object PublisherConfirms extends App {
implicit val system = ActorSystem("mySystem")
case object PublishingFails
case object PublishingWorks
// create an AMQP connection
val connFactory = new ConnectionFactory()
connFactory.setUri("amqp://guest:guest@localhost/%2F")
val conn = system.actorOf(ConnectionOwner.props(connFactory, 1.second))
val producer = ConnectionOwner.createChildActor(conn, ChannelOwner.props())
Amqp.waitForConnection(system, producer).await()
class Foo extends Actor {
producer ! ConfirmSelect
producer ! AddReturnListener(self)
producer ! AddConfirmListener(self)
producer ! DeclareQueue(QueueParameters(name = "my_queue", passive = false, durable = false, autodelete = true))
def receive = {
case PublishingFails => {
producer ! Publish("", "no_such_queue", "yo!".getBytes)
producer ! Publish("", "no_such_queue", "yo!".getBytes)
producer ! WaitForConfirms(None)
}
case PublishingWorks => {
producer ! Publish("", "my_queue", "yo!".getBytes)
producer ! Publish("", "my_queue", "yo!".getBytes)
producer ! WaitForConfirms(None)
}
case msg => println(msg)
}
}
val foo = system.actorOf(Props[Foo])
foo ! PublishingFails
Thread.sleep(1000)
foo ! PublishingWorks
}
| gawkermedia/amqp-client | src/main/scala/com/github.sstone/amqp/samples/PublisherConfirms.scala | Scala | mit | 1,594 |
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf,SparkContext}
object CollectMap{
def main(args:Array[String]){
val conf = new SparkConf().setAppName("RDD Aggregate").setMaster("local")
val sc = new SparkContext(conf)
val citi = sc.textFile("./citi")
val citiPairRDD = citi.map(row => (row.split("\\t")(0), row.split("\\t")(1).toInt))
val data = citiPairRDD.collectAsMap()
println(data)
}
}
| malli3131/SparkApps | RDD_API_Calls/CollectMap.scala | Scala | apache-2.0 | 423 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.blaze.http
import org.http4s.blaze.util.Property
/** Enable logging of sensitive information such as header values and request content */
private[blaze] object logsensitiveinfo extends Property(default = false)
| http4s/blaze | http/src/main/scala/org/http4s/blaze/http/logsensitiveinfo.scala | Scala | apache-2.0 | 827 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.net
import com.intel.analytics.bigdl.optim.OptimMethod
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{EngineType, Table}
import com.intel.analytics.zoo.common.PythonInterpreter
import com.intel.analytics.zoo.feature.PythonFeatureSet
import jep.NDArray
import org.apache.spark.TaskContext
import scala.reflect.ClassTag
import com.intel.analytics.zoo.pipeline.api.keras.models.InternalOptimizerUtil
import com.intel.analytics.zoo.pipeline.api.net.TorchOptim.DecayType
class TorchOptim[@specialized(Float, Double) T: ClassTag](
torchOptim: Array[Byte],
decayType: DecayType)(implicit ev: TensorNumeric[T]) extends OptimMethod[T] {
import TorchOptim._
@transient
protected val postfix = Integer.toHexString(java.util.UUID.randomUUID().hashCode())
@transient
protected lazy val optimType: OptimType = {
val partId = TaskContext.getPartitionId()
name = s"optim_${postfix}_${partId}"
PythonInterpreter.set("optim_bytes", torchOptim)
val currentEpoch = getEpoch(this)
val loadModelCode =
s"""
|import torch
|import io
|from torch.optim.optimizer import *
|from torch.optim.lr_scheduler import _LRScheduler
|from torch.optim.lr_scheduler import *
|from zoo.pipeline.api.torch import zoo_pickle_module
|
|optim_by = bytes(b % 256 for b in optim_bytes)
|$name = torch.load(io.BytesIO(optim_by), pickle_module=zoo_pickle_module)
|""".stripMargin
PythonInterpreter.exec(loadModelCode)
weightName = name + "_weight"
gradientName = name + "gradient"
if (PythonInterpreter.getValue[Boolean](s"isinstance($name, Optimizer)")) {
initCode = s"""
|$weightName = torch.tensor($weightName, requires_grad=True)
|$weightName = torch.autograd.Variable($weightName)
|${name}.__init__([${weightName}], **${name}.defaults)
|""".stripMargin
stepCode = s"""
|${weightName}.grad = torch.tensor(${gradientName})
|${name}.step()
|""".stripMargin
Optim
} else if (PythonInterpreter.getValue[Boolean](s"isinstance($name, _LRScheduler)")) {
initCode = s"""
|$weightName = torch.tensor($weightName, requires_grad=True)
|$weightName = torch.autograd.Variable($weightName)
|${name}.optimizer.__init__([${weightName}], **${name}.optimizer.defaults)
|""".stripMargin
stepCode = s"""
|${weightName}.grad = torch.tensor(${gradientName})
|${name}.optimizer.step()
|""".stripMargin
LrScheduler
} else if (PythonInterpreter.getValue[Boolean](s"isinstance($name, ReduceLROnPlateau)")) {
// ReduceLROnPlateau is not subclass of LRScheduler
require(decayType == EpochDecayByScore, "Plateau should use decayType EpochDecayByScore")
initCode = s"""
|$weightName = torch.tensor($weightName, requires_grad=True)
|$weightName = torch.autograd.Variable($weightName)
|${name}.optimizer.__init__([${weightName}], **${name}.optimizer.defaults)
|""".stripMargin
stepCode = s"""
|${weightName}.grad = torch.tensor(${gradientName})
|${name}.optimizer.step()
|""".stripMargin
Plateau
} else {
      val unknownType = PythonInterpreter.getValue[String](s"str(type($name))")
      throw new IllegalArgumentException(s"Unknown optimizer type: " + unknownType)
}
}
@transient
protected var name = ""
@transient
protected var weightName = ""
@transient
protected var gradientName = ""
@transient
protected var initCode = ""
@transient
protected var stepCode = ""
@transient
protected var init = false
@transient
protected var lastEpoch = -1
override def optimize(
feval: Tensor[T] => (T, Tensor[T]),
parameter: Tensor[T]): (Tensor[T], Array[T]) = {
optimType
val epoch = getEpoch(this)
val (fx, dfdx) = feval(parameter)
if (!init) {
lastEpoch = epoch
PythonInterpreter.set(weightName, new NDArray[Array[Float]](
parameter.toTensor[Float].storage().array()))
PythonInterpreter.exec(initCode)
init = true
} else {
updateHyperParameter()
}
PythonInterpreter.set(gradientName, new NDArray[Array[Float]](
dfdx.toTensor[Float].storage().array()))
PythonInterpreter.exec(stepCode)
val updatedParameter = PythonFeatureSet.ndArrayToTensor(
PythonInterpreter.getValue(s"${weightName}.data.numpy()").asInstanceOf[NDArray[_]])
parameter.copy(updatedParameter.toTensor[T])
(parameter, Array(fx))
}
override def clearHistory(): Unit = {
}
override def getLearningRate(): Double = {
optimType match {
case Optim =>
PythonInterpreter.getValue[Double](s"${name}.defaults['lr']")
case LrScheduler =>
// TODO: multi LR support.
PythonInterpreter.getValue[Double](s"${name}.get_last_lr()[0]")
case Plateau =>
if (PythonInterpreter.getValue[Boolean](s"hasattr(${name}, '_last_lr')")) {
PythonInterpreter.getValue[Double](s"${name}._last_lr[0]")
} else {
PythonInterpreter.getValue[Double](s"${name}.optimizer.defaults['lr']")
}
case _ =>
throw new IllegalArgumentException()
}
}
override def loadFromTable(config: Table): TorchOptim.this.type = {
this
}
override def updateHyperParameter(): Unit = {
if (optimType == LrScheduler || optimType == Plateau) {
val epoch = getEpoch(this)
decayType match {
case TorchOptim.EpochDecay =>
if (lastEpoch < epoch) {
PythonInterpreter.exec(s"${name}.step()")
lastEpoch += 1
}
case TorchOptim.IterationDecay =>
PythonInterpreter.exec(s"${name}.step()")
case TorchOptim.EpochDecayByScore =>
if (lastEpoch < epoch) {
val valScore = getScore(this)
PythonInterpreter.set("val_score", java.lang.Float.valueOf(valScore))
PythonInterpreter.exec(s"${name}.step(val_score)")
lastEpoch += 1
}
}
}
}
override def getHyperParameter(): String = {
if (optimType == LrScheduler) {
s"Current learning rate is ${getLearningRate()}. "
} else {
""
}
}
}
object TorchOptim{
sealed trait OptimType
case object LrScheduler extends OptimType
case object Optim extends OptimType
case object Plateau extends OptimType
sealed trait DecayType
case object EpochDecay extends DecayType
case object IterationDecay extends DecayType
case object EpochDecayByScore extends DecayType
// TODO: Support this later.
// case object IterationDecayByEpoch extends DecayType
def getDecayType(decayType: String): DecayType = {
decayType.toLowerCase() match {
case "epochdecay" =>
EpochDecay
case "iterationdecay" =>
IterationDecay
case "epochdecaybyscore" =>
EpochDecayByScore
// case "iterationdecaybyepoch" =>
// IterationDecayByEpoch
case _ =>
throw new IllegalArgumentException(s"unknow decay type: ${decayType}, expected:" +
s"EpochDecay, IterationDecay, EpochDecayByScore")
}
}
def apply[T: ClassTag](
optimBytes: Array[Byte],
decayType: String)(implicit ev: TensorNumeric[T]): TorchOptim[T] = {
apply[T](optimBytes, getDecayType(decayType))
}
def apply[T: ClassTag](
optimBytes: Array[Byte],
decayType: DecayType)(implicit ev: TensorNumeric[T]): TorchOptim[T] = {
new TorchOptim[T](optimBytes, decayType)
}
protected[net] def getEpoch[T: ClassTag](optim: TorchOptim[T]): Int = {
// BigDL's epoch starts from 1, while torch starts from 0.
InternalOptimizerUtil.getStateFromOptiMethod(optim)[Int]("epoch") - 1
}
protected[net] def getScore[T: ClassTag](optim: TorchOptim[T]): Float = {
// BigDL's epoch starts from 1, while torch starts from 0.
InternalOptimizerUtil.getStateFromOptiMethod(optim)[Float]("score")
}
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/net/TorchOptim.scala | Scala | apache-2.0 | 8,829 |
package com.nn.math.activations
import scala.collection.mutable.ArrayBuffer
/**
 * Abstract parent class representing an activation function.
* Created by george on 12/8/14.
*/
abstract class ActivationFunction {
def activation(inputs : Vector[Double], weights: Vector[Double]): Double
}
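/**
 * A minimal illustrative sketch (not part of the original code) of a concrete activation:
 * it assumes the conventional logistic unit, i.e. the weighted sum of the inputs squashed
 * into (0, 1) by the sigmoid function.
 */
class SigmoidActivation extends ActivationFunction {
  override def activation(inputs: Vector[Double], weights: Vector[Double]): Double = {
    // Dot product of inputs and weights, then apply the logistic squashing function.
    val net = inputs.zip(weights).map { case (input, weight) => input * weight }.sum
    1.0 / (1.0 + math.exp(-net))
  }
}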
| GeorgeDittmar/Scala-NeuralNet | src/main/scala/com/nn/math/activations/ActivationFunction.scala | Scala | apache-2.0 | 295 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.persistence.jdbc.journal
import akka.NotUsed
import akka.actor.{ Actor, ActorRef }
import akka.persistence.AtomicWrite
import akka.persistence.jdbc.serialization.Serialized
import akka.persistence.journal.Tagged
import akka.persistence.query.EventEnvelope
import akka.stream.scaladsl.Flow
import scala.collection.{ Iterable, mutable }
import scala.util.Try
object EventsByPersistenceIdTagSubscriberRegistry {
case class EventsByPersistenceIdTagSubscriberTerminated(ref: ActorRef)
}
trait EventsByPersistenceIdTagSubscriberRegistry { _: SlickAsyncWriteJournal ⇒
import EventsByPersistenceIdTagSubscriberRegistry._
private val eventsByPersistenceIdAndTagSubscribers = new mutable.HashMap[String, mutable.Set[ActorRef]] with mutable.MultiMap[String, ActorRef]
private def hasEventsByTagSubscribers: Boolean = eventsByPersistenceIdAndTagSubscribers.nonEmpty
def subscriberKey(persistenceId: String, tag: String): String = s"$persistenceId-$tag"
private def addEventsByTagSubscriber(subscriber: ActorRef, persistenceId: String, tag: String): Unit =
eventsByPersistenceIdAndTagSubscribers.addBinding(subscriberKey(persistenceId, tag), subscriber)
private def removeEventsByPersistenceIdAndTagSubscriber(subscriber: ActorRef): Unit = {
val keys = eventsByPersistenceIdAndTagSubscribers.collect { case (k, s) if s.contains(subscriber) ⇒ k }
keys.foreach { key ⇒ eventsByPersistenceIdAndTagSubscribers.removeBinding(key, subscriber) }
}
protected def sendEventsByPersistenceIdAndTagSubscriberTerminated(ref: ActorRef): Unit =
self ! EventsByPersistenceIdTagSubscriberTerminated(ref)
protected def receiveEventsByPersistenceIdAndTagRegistry: Actor.Receive = {
case JdbcJournal.EventsByPersistenceIdAndTagRequest(persistenceId, tag) ⇒
addEventsByTagSubscriber(sender(), persistenceId, tag)
context.watch(sender())
case EventsByPersistenceIdTagSubscriberTerminated(ref) ⇒
removeEventsByPersistenceIdAndTagSubscriber(ref)
}
private def unwrapTagged(event: Any): Any = event match {
case Tagged(payload, tags) ⇒ payload
case _ ⇒ event
}
protected def eventsByPersistenceIdAndTagFlow(atomicWrites: Iterable[AtomicWrite]): Flow[Try[Iterable[Serialized]], Try[Iterable[Serialized]], NotUsed] =
Flow[Try[Iterable[Serialized]]].map { atomicWriteResult ⇒
if (hasEventsByTagSubscribers) {
for {
seqSerialized ← atomicWriteResult
serialized ← seqSerialized
persistenceId = serialized.persistenceId
tags ← serialized.tags
tag ← serializationFacade.decodeTags(tags)
key = subscriberKey(persistenceId, tag)
if eventsByPersistenceIdAndTagSubscribers contains key
atomicWrite ← atomicWrites
if atomicWrite.persistenceId == serialized.persistenceId
persistentRepr ← atomicWrite.payload
if persistentRepr.sequenceNr == serialized.sequenceNr
subscriber ← eventsByPersistenceIdAndTagSubscribers(key)
envelope = EventEnvelope(persistentRepr.sequenceNr, persistentRepr.persistenceId, persistentRepr.sequenceNr, unwrapTagged(persistentRepr.payload))
eventAppended = JdbcJournal.EventAppended(envelope)
} subscriber ! eventAppended
}
atomicWriteResult
}
}
| prettynatty/akka-persistence-jdbc | src/main/scala/akka/persistence/jdbc/journal/EventsByPersistenceIdTagSubscriberRegistry.scala | Scala | apache-2.0 | 3,955 |
package it.codingjam.lagioconda
import java.awt.geom.Ellipse2D
import java.awt.image.BufferedImage
import java.awt.{Graphics2D, RenderingHints}
import it.codingjam.lagioconda.ga.{Gene, _}
package object conversions {
implicit class CircleToGene(circle: Circle) {
private def to5bits(i: Int) = {
require(i >= 0 && i < 32)
"%05d".format(i.toBinaryString.toInt)
}
private def to6bits(i: Int) = {
require(i >= 0 && i < 64)
"%06d".format(i.toBinaryString.toInt)
}
private def to7bits(i: Int) = {
require(i >= 0 && i < 128)
"%07d".format(i.toBinaryString.toInt)
}
private def to8bits(i: Int) = {
require(i >= 0 && i < 256)
"%08d".format(i.toBinaryString.toInt)
}
private def to10bits(i: Int) = {
require(i >= 0 && i < 1024)
"%010d".format(i.toBinaryString.toInt)
}
private def to9bits(i: Int) = {
require(i >= 0 && i < 512)
"%09d".format(i.toBinaryString.toInt)
}
def toGene: Gene = {
val list = List(
to8bits(circle.center.x),
to8bits(circle.center.y),
to5bits(circle.radius),
to5bits(circle.color.red),
to5bits(circle.color.green),
to5bits(circle.color.blue)
)
Gene(list.mkString(""))
}
}
implicit class GeneToCircle(gene: Gene) {
def toCircle(alpha: Int)(implicit gm: GeneMapping): Circle = {
val c = gm.toComponents(gene)
Circle(Center(c._1, c._2), c._3, Color(c._4, c._5, c._6, alpha))
}
}
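  // Derives "neighbouring" genes: the gene is decoded into its components, each component is
  // shifted by small offsets (0/±2 for position and radius, 0/±7 for the colour channels),
  // re-encoded using the bit sizes from the GeneMapping, and duplicates are dropped.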
def neigh(gene: Gene)(implicit geneMapping: GeneMapping): List[Gene] = {
def to2bits(i: Int) = {
val k = if (i < 0) 0 else if (i > 3) 3 else i
"%05d".format(k.toBinaryString.toInt)
}
def to4bits(i: Int) = {
val k = (i + 16) % 16
"%04d".format(k.toBinaryString.toInt)
}
def to5bits(i: Int) = {
val k = (i + 32) % 32
"%05d".format(k.toBinaryString.toInt)
}
def to6bits(i: Int) = {
val k = (i + 64) % 64
"%06d".format(k.toBinaryString.toInt)
}
def to7bits(i: Int) = {
val k = (i + 128) % 128
"%07d".format(k.toBinaryString.toInt)
}
def to8bits(i: Int, min: Int = 0): String = {
var k = (i + 256) % 256
if (k < min) k = min
"%08d".format(k.toBinaryString.toInt)
}
def pow(i: Int): Int =
if (i == 3) 8 else if (i == 4) 16 else if (i == 5) 32 else if (i == 6) 64 else if (i == 7) 128 else if (i == 8) 256 else 0
def toBits(value: Int, numberOfBits: Int): String = {
require(numberOfBits >= 4 && numberOfBits <= 8, "Number of bits " + numberOfBits)
val pow2 = pow(numberOfBits)
val k = (value + pow2) % pow2
val s = s"%0${numberOfBits}d"
s.format(k.toBinaryString.toInt)
}
val t = geneMapping.toComponents(gene)
val s = geneMapping.sizes
val u = for {
i <- List(0, 2, -2)
j <- List(0, 2, -2)
k <- List(0, 2, -2)
l <- List(0, 7, -7)
m <- List(0, 7, -7)
n <- List(0, 7, -7)
} yield (t._1 + i, t._2 + j, t._3 + k, t._4 + l, t._5 + m, t._6 + n)
val ii = u.map(e =>
toBits(e._1, s(0)) + toBits(e._2, s(1)) + toBits(e._3, s(2)) + toBits(e._4, s(3)) + toBits(e._5, s(4)) + toBits(e._6, s(5)))
ii.distinct.map(Gene(_))
}
implicit class ChromosomeToBufferedImage(chromosome: Chromosome) {
def toBufferedImage(alpha: Int)(implicit dimensions: ImageDimensions): BufferedImage = {
val circles: List[Circle] = chromosome.genes.map(_.toCircle(alpha)(chromosome.geneMapping))
val image = new BufferedImage(dimensions.width, dimensions.height, BufferedImage.TYPE_3BYTE_BGR)
val g2: Graphics2D = image.createGraphics()
val qualityHints = new RenderingHints(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON)
qualityHints.put(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED)
g2.setRenderingHints(qualityHints)
circles.foreach { circle =>
        val transparent = new java.awt.Color(circle.color.red, circle.color.green, circle.color.blue, circle.color.alpha)
g2.setColor(transparent)
g2.fill(
new Ellipse2D.Float(circle.center.x - circle.radius, circle.center.y - circle.radius, circle.radius * 2, circle.radius * 2))
}
g2.dispose()
image.flush()
image
}
}
}
| coding-jam/lagioconda | common/src/main/scala/it/codingjam/lagioconda/conversions.scala | Scala | apache-2.0 | 4,404 |
package blended.updater.config
import java.io._
import com.typesafe.config.{ Config, ConfigFactory, ConfigRenderOptions }
import blended.util.logging.Logger
/**
* Helper to write [[Config]] to files or streams.
*/
trait ConfigWriter {
private[this] val log = Logger[ConfigWriter]
def write(config: Config, file: File, path: Option[String]): Unit = {
file.getParentFile() match {
case null =>
case parent =>
log.debug(s"Creating dir: ${parent}")
parent.mkdirs()
}
val ps = new PrintStream(new BufferedOutputStream(new FileOutputStream(file)))
try {
write(config, ps, path)
} finally {
ps.close()
}
}
def write(config: Config, os: OutputStream, path: Option[String]): Unit = {
val ps = new PrintStream(new BufferedOutputStream(os))
val cnf = path.map { p =>
ConfigFactory.empty().withValue(p, config.root())
}.getOrElse(config)
ps.print(cnf.root().render(
ConfigRenderOptions.defaults().setOriginComments(false).setComments(false).setFormatted(true).setJson(false)
))
ps.flush()
}
}
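// Illustrative usage (file name and path are examples only):
//   val cfg = com.typesafe.config.ConfigFactory.parseString("""count = 1""")
//   ConfigWriter.write(cfg, new File("/tmp/out.conf"), Some("wrapped"))
// renders the config as formatted HOCON (no JSON, no comments) nested under the path "wrapped".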
object ConfigWriter extends ConfigWriter | lefou/blended | blended.updater.config/jvm/src/main/scala/blended/updater/config/ConfigWriter.scala | Scala | apache-2.0 | 1,142 |
package com.alexitc.coinalerts.data
import com.alexitc.coinalerts.models.{Exchange, NewCurrencyAlert, UserId}
import com.alexitc.playsonify.core.ApplicationResult
import scala.language.higherKinds
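/**
 * Data-access operations for new-currency alerts, abstracted over the result wrapper `F[_]`
 * (for example `ApplicationResult` in the blocking variant declared below).
 */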
trait NewCurrencyAlertDataHandler[F[_]] {
def create(userId: UserId, exchange: Exchange): F[NewCurrencyAlert]
def get(userId: UserId): F[List[NewCurrencyAlert]]
def getBy(exchange: Exchange): F[List[NewCurrencyAlert]]
def getAll(): F[List[NewCurrencyAlert]]
def delete(userId: UserId, exchange: Exchange): F[NewCurrencyAlert]
}
trait NewCurrencyAlertBlockingDataHandler extends NewCurrencyAlertDataHandler[ApplicationResult]
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/data/NewCurrencyAlertDataHandler.scala | Scala | gpl-3.0 | 642 |
package com.monochromeroad.play.xwiki.rendering.plugin
import org.specs2.mutable._
import play.api.test._
import play.api.test.Helpers._
import java.io.StringReader
import java.util.Date
/**
* Plugin for the default XWiki rendering system
*
* @author Masatoshi Hayashi
*/
class DefaultXWikiRenderingPluginSpec extends Specification {
val testString = "**TEST** {{rb read='read'}}Unreadable word{{/rb}} Current {{date/}}"
override def is = args(sequential = true) ^ super.is
"The Default XWiki Rendering Plugin" should {
"load default string stream renderer" in {
running(FakeApplication(
additionalPlugins = Seq("com.monochromeroad.play.xwiki.rendering.plugin.DefaultXWikiRenderingPlugin"),
additionalConfiguration = Map(
"xwiki.rendering.default.macros.1" -> "com.monochromeroad.play.xwiki.rendering.plugin.RbMacro",
"xwiki.rendering.default.macros.2" -> "com.monochromeroad.play.xwiki.rendering.plugin.DateMacro"
))) {
var result = new StringBuilder()
DefaultXWikiStringStreamRenderer.render(new StringReader(testString), { n => result.append(n)})
result.toString() must contain("<ruby>")
result.toString() must contain("Current %tF" format new Date())
}
}
}
}
| literalice/play-xwiki-rendering | src/test/scala/com/monochromeroad/play/xwiki/rendering/plugin/DefaultXWikiRenderingPluginSpec.scala | Scala | lgpl-2.1 | 1,279 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
/**
* A simple listener for application events.
*
* This listener expects to hear events from a single application only. If events
* from multiple applications are seen, the behavior is unspecified.
*/
private[spark] class ApplicationEventListener extends SparkListener {
var appName: Option[String] = None
var appId: Option[String] = None
var sparkUser: Option[String] = None
var startTime: Option[Long] = None
var endTime: Option[Long] = None
var viewAcls: Option[String] = None
var adminAcls: Option[String] = None
override def onApplicationStart(applicationStart: SparkListenerApplicationStart) {
appName = Some(applicationStart.appName)
appId = applicationStart.appId
startTime = Some(applicationStart.time)
sparkUser = Some(applicationStart.sparkUser)
}
override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd) {
endTime = Some(applicationEnd.time)
}
override def onEnvironmentUpdate(environmentUpdate: SparkListenerEnvironmentUpdate) {
synchronized {
val environmentDetails = environmentUpdate.environmentDetails
val allProperties = environmentDetails("Spark Properties").toMap
viewAcls = allProperties.get("spark.ui.view.acls")
adminAcls = allProperties.get("spark.admin.acls")
}
}
}
| Dax1n/spark-core | core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala | Scala | apache-2.0 | 2,134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import scala.collection.mutable
import com.google.common.annotations.VisibleForTesting
import org.apache.spark.{JobExecutionStatus, Logging}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.execution.SQLExecution
import org.apache.spark.sql.execution.metric.{SQLMetricParam, SQLMetricValue}
private[sql] class SQLListener(sqlContext: SQLContext) extends SparkListener with Logging {
private val retainedExecutions =
sqlContext.sparkContext.conf.getInt("spark.sql.ui.retainedExecutions", 1000)
private val activeExecutions = mutable.HashMap[Long, SQLExecutionUIData]()
// Old data in the following fields must be removed in "trimExecutionsIfNecessary".
// If adding new fields, make sure "trimExecutionsIfNecessary" can clean up old data
private val _executionIdToData = mutable.HashMap[Long, SQLExecutionUIData]()
/**
* Maintain the relation between job id and execution id so that we can get the execution id in
* the "onJobEnd" method.
*/
private val _jobIdToExecutionId = mutable.HashMap[Long, Long]()
private val _stageIdToStageMetrics = mutable.HashMap[Long, SQLStageMetrics]()
private val failedExecutions = mutable.ListBuffer[SQLExecutionUIData]()
private val completedExecutions = mutable.ListBuffer[SQLExecutionUIData]()
def executionIdToData: Map[Long, SQLExecutionUIData] = synchronized {
_executionIdToData.toMap
}
def jobIdToExecutionId: Map[Long, Long] = synchronized {
_jobIdToExecutionId.toMap
}
def stageIdToStageMetrics: Map[Long, SQLStageMetrics] = synchronized {
_stageIdToStageMetrics.toMap
}
private def trimExecutionsIfNecessary(
executions: mutable.ListBuffer[SQLExecutionUIData]): Unit = {
if (executions.size > retainedExecutions) {
val toRemove = math.max(retainedExecutions / 10, 1)
executions.take(toRemove).foreach { execution =>
for (executionUIData <- _executionIdToData.remove(execution.executionId)) {
for (jobId <- executionUIData.jobs.keys) {
_jobIdToExecutionId.remove(jobId)
}
for (stageId <- executionUIData.stages) {
_stageIdToStageMetrics.remove(stageId)
}
}
}
executions.trimStart(toRemove)
}
}
override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
val executionIdString = jobStart.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
if (executionIdString == null) {
// This is not a job created by SQL
return
}
val executionId = executionIdString.toLong
val jobId = jobStart.jobId
val stageIds = jobStart.stageIds
synchronized {
activeExecutions.get(executionId).foreach { executionUIData =>
executionUIData.jobs(jobId) = JobExecutionStatus.RUNNING
executionUIData.stages ++= stageIds
stageIds.foreach(stageId =>
_stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId = 0))
_jobIdToExecutionId(jobId) = executionId
}
}
}
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = synchronized {
val jobId = jobEnd.jobId
for (executionId <- _jobIdToExecutionId.get(jobId);
executionUIData <- _executionIdToData.get(executionId)) {
jobEnd.jobResult match {
case JobSucceeded => executionUIData.jobs(jobId) = JobExecutionStatus.SUCCEEDED
case JobFailed(_) => executionUIData.jobs(jobId) = JobExecutionStatus.FAILED
}
if (executionUIData.completionTime.nonEmpty && !executionUIData.hasRunningJobs) {
// We are the last job of this execution, so mark the execution as finished. Note that
// `onExecutionEnd` also does this, but currently that can be called before `onJobEnd`
// since these are called on different threads.
markExecutionFinished(executionId)
}
}
}
override def onExecutorMetricsUpdate(
executorMetricsUpdate: SparkListenerExecutorMetricsUpdate): Unit = synchronized {
for ((taskId, stageId, stageAttemptID, metrics) <- executorMetricsUpdate.taskMetrics) {
updateTaskAccumulatorValues(taskId, stageId, stageAttemptID, metrics, finishTask = false)
}
}
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit = synchronized {
val stageId = stageSubmitted.stageInfo.stageId
val stageAttemptId = stageSubmitted.stageInfo.attemptId
// Always override metrics for old stage attempt
_stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId)
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
updateTaskAccumulatorValues(
taskEnd.taskInfo.taskId,
taskEnd.stageId,
taskEnd.stageAttemptId,
taskEnd.taskMetrics,
finishTask = true)
}
/**
* Update the accumulator values of a task with the latest metrics for this task. This is called
* every time we receive an executor heartbeat or when a task finishes.
*/
private def updateTaskAccumulatorValues(
taskId: Long,
stageId: Int,
stageAttemptID: Int,
metrics: TaskMetrics,
finishTask: Boolean): Unit = {
if (metrics == null) {
return
}
_stageIdToStageMetrics.get(stageId) match {
case Some(stageMetrics) =>
if (stageAttemptID < stageMetrics.stageAttemptId) {
// A task of an old stage attempt. Because a new stage is submitted, we can ignore it.
} else if (stageAttemptID > stageMetrics.stageAttemptId) {
logWarning(s"A task should not have a higher stageAttemptID ($stageAttemptID) then " +
s"what we have seen (${stageMetrics.stageAttemptId})")
} else {
          // TODO We don't know the attemptId. Currently, all we can do is override the
          // accumulator updates. However, if two attempts of the same task run at the same
          // time (e.g. because of speculation), the accumulator updates will be overwritten
          // by different task attempts and the results will be weird.
stageMetrics.taskIdToMetricUpdates.get(taskId) match {
case Some(taskMetrics) =>
if (finishTask) {
taskMetrics.finished = true
taskMetrics.accumulatorUpdates = metrics.accumulatorUpdates()
} else if (!taskMetrics.finished) {
taskMetrics.accumulatorUpdates = metrics.accumulatorUpdates()
} else {
// If a task is finished, we should not override with accumulator updates from
// heartbeat reports
}
case None =>
// TODO Now just set attemptId to 0. Should fix here when we can get the attempt
// id from SparkListenerExecutorMetricsUpdate
stageMetrics.taskIdToMetricUpdates(taskId) = new SQLTaskMetrics(
attemptId = 0, finished = finishTask, metrics.accumulatorUpdates())
}
}
case None =>
// This execution and its stage have been dropped
}
}
def onExecutionStart(
executionId: Long,
description: String,
details: String,
physicalPlanDescription: String,
physicalPlanGraph: SparkPlanGraph,
time: Long): Unit = {
val sqlPlanMetrics = physicalPlanGraph.nodes.flatMap { node =>
node.metrics.map(metric => metric.accumulatorId -> metric)
}
val executionUIData = new SQLExecutionUIData(executionId, description, details,
physicalPlanDescription, physicalPlanGraph, sqlPlanMetrics.toMap, time)
synchronized {
activeExecutions(executionId) = executionUIData
_executionIdToData(executionId) = executionUIData
}
}
def onExecutionEnd(executionId: Long, time: Long): Unit = synchronized {
_executionIdToData.get(executionId).foreach { executionUIData =>
executionUIData.completionTime = Some(time)
if (!executionUIData.hasRunningJobs) {
// onExecutionEnd happens after all "onJobEnd"s
// So we should update the execution lists.
markExecutionFinished(executionId)
} else {
        // There are still running jobs, so onExecutionEnd happened before some "onJobEnd"s.
        // We don't yet know whether the execution succeeded, so let the last onJobEnd update
        // the execution lists.
}
}
}
private def markExecutionFinished(executionId: Long): Unit = {
activeExecutions.remove(executionId).foreach { executionUIData =>
if (executionUIData.isFailed) {
failedExecutions += executionUIData
trimExecutionsIfNecessary(failedExecutions)
} else {
completedExecutions += executionUIData
trimExecutionsIfNecessary(completedExecutions)
}
}
}
def getRunningExecutions: Seq[SQLExecutionUIData] = synchronized {
activeExecutions.values.toSeq
}
def getFailedExecutions: Seq[SQLExecutionUIData] = synchronized {
failedExecutions
}
def getCompletedExecutions: Seq[SQLExecutionUIData] = synchronized {
completedExecutions
}
def getExecution(executionId: Long): Option[SQLExecutionUIData] = synchronized {
_executionIdToData.get(executionId)
}
/**
* Get all accumulator updates from all tasks which belong to this execution and merge them.
*/
def getExecutionMetrics(executionId: Long): Map[Long, Any] = synchronized {
_executionIdToData.get(executionId) match {
case Some(executionUIData) =>
val accumulatorUpdates = {
for (stageId <- executionUIData.stages;
stageMetrics <- _stageIdToStageMetrics.get(stageId).toIterable;
taskMetrics <- stageMetrics.taskIdToMetricUpdates.values;
accumulatorUpdate <- taskMetrics.accumulatorUpdates.toSeq) yield {
accumulatorUpdate
}
}.filter { case (id, _) => executionUIData.accumulatorMetrics.contains(id) }
mergeAccumulatorUpdates(accumulatorUpdates, accumulatorId =>
executionUIData.accumulatorMetrics(accumulatorId).metricParam).
mapValues(_.asInstanceOf[SQLMetricValue[_]].value)
case None =>
// This execution has been dropped
Map.empty
}
}
private def mergeAccumulatorUpdates(
accumulatorUpdates: Seq[(Long, Any)],
paramFunc: Long => SQLMetricParam[SQLMetricValue[Any], Any]): Map[Long, Any] = {
accumulatorUpdates.groupBy(_._1).map { case (accumulatorId, values) =>
val param = paramFunc(accumulatorId)
(accumulatorId,
values.map(_._2.asInstanceOf[SQLMetricValue[Any]]).foldLeft(param.zero)(param.addInPlace))
}
}
}
/**
* Represent all necessary data for an execution that will be used in Web UI.
*/
private[ui] class SQLExecutionUIData(
val executionId: Long,
val description: String,
val details: String,
val physicalPlanDescription: String,
val physicalPlanGraph: SparkPlanGraph,
val accumulatorMetrics: Map[Long, SQLPlanMetric],
val submissionTime: Long,
var completionTime: Option[Long] = None,
val jobs: mutable.HashMap[Long, JobExecutionStatus] = mutable.HashMap.empty,
val stages: mutable.ArrayBuffer[Int] = mutable.ArrayBuffer()) {
/**
* Return whether there are running jobs in this execution.
*/
def hasRunningJobs: Boolean = jobs.values.exists(_ == JobExecutionStatus.RUNNING)
/**
* Return whether there are any failed jobs in this execution.
*/
def isFailed: Boolean = jobs.values.exists(_ == JobExecutionStatus.FAILED)
def runningJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.RUNNING }.keys.toSeq
def succeededJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.SUCCEEDED }.keys.toSeq
def failedJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.FAILED }.keys.toSeq
}
/**
* Represent a metric in a SQLPlan.
*
 * Because changes to an "Accumulator" cannot be reverted, we need to maintain the accumulator
 * updates for each task ourselves, so that if a task is retried we can simply override the old
 * updates with the updates of the new task attempt. Since the updates are not added to the
 * accumulator directly, we need to use "AccumulatorParam" to compute the aggregated value.
*/
private[ui] case class SQLPlanMetric(
name: String,
accumulatorId: Long,
metricParam: SQLMetricParam[SQLMetricValue[Any], Any])
/**
* Store all accumulatorUpdates for all tasks in a Spark stage.
*/
private[ui] class SQLStageMetrics(
val stageAttemptId: Long,
val taskIdToMetricUpdates: mutable.HashMap[Long, SQLTaskMetrics] = mutable.HashMap.empty)
/**
* Store all accumulatorUpdates for a Spark task.
*/
private[ui] class SQLTaskMetrics(
val attemptId: Long, // TODO not used yet
var finished: Boolean,
var accumulatorUpdates: Map[Long, Any])
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala | Scala | apache-2.0 | 13,686 |
package chap4
object Exe6 extends App {
val e = new Exception("Exe6")
def left[A]: Either[Exception, A] = Left(e)
assert(Right(1).map(_ + 1) == Right(2))
assert(left[Int].map(_ + 1) == Left(e))
  // Got tedious, so the remaining tests are omitted
}
| ponkotuy/FPScala | src/main/scala/chap4/Exe6.scala | Scala | unlicense | 253 |
package spark.metrics
import java.util.Properties
import java.io.{File, FileOutputStream}
import org.scalatest.{BeforeAndAfter, FunSuite}
import spark.metrics._
class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
var filePath: String = _
before {
filePath = getClass.getClassLoader.getResource("test_metrics_config.properties").getFile()
}
test("MetricsConfig with default properties") {
val conf = new MetricsConfig(Option("dummy-file"))
conf.initialize()
assert(conf.properties.size() === 0)
assert(conf.properties.getProperty("test-for-dummy") === null)
val property = conf.getInstance("random")
assert(property.size() === 0)
}
test("MetricsConfig with properties set") {
val conf = new MetricsConfig(Option(filePath))
conf.initialize()
val masterProp = conf.getInstance("master")
assert(masterProp.size() === 3)
assert(masterProp.getProperty("sink.console.period") === "20")
assert(masterProp.getProperty("sink.console.unit") === "minutes")
assert(masterProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
val workerProp = conf.getInstance("worker")
assert(workerProp.size() === 3)
assert(workerProp.getProperty("sink.console.period") === "10")
assert(workerProp.getProperty("sink.console.unit") === "seconds")
assert(masterProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
}
test("MetricsConfig with subProperties") {
val conf = new MetricsConfig(Option(filePath))
conf.initialize()
val propCategories = conf.propertyCategories
assert(propCategories.size === 2)
val masterProp = conf.getInstance("master")
val sourceProps = conf.subProperties(masterProp, MetricsSystem.SOURCE_REGEX)
assert(sourceProps.size === 1)
assert(sourceProps("jvm").getProperty("class") === "spark.metrics.source.JvmSource")
val sinkProps = conf.subProperties(masterProp, MetricsSystem.SINK_REGEX)
assert(sinkProps.size === 1)
assert(sinkProps.contains("console"))
val consoleProps = sinkProps("console")
assert(consoleProps.size() === 2)
}
}
| rjpower/spark | core/src/test/scala/spark/metrics/MetricsConfigSuite.scala | Scala | apache-2.0 | 2,144 |
package vggames.shared
import br.com.caelum.vraptor.{ Get, Resource, Result }
import vggames.shared.vraptor.VraptorExtensions._
import vggames.shared.view.Robots
import vggames.shared.view.Sitemap
@Resource
class SearchEngineConfiguration(cfg : GamesConfiguration, result : Result) {
@Get(Array("/robots.txt"))
def robots {
result.render(new Robots)(cfg.activeGames, cfg.inactiveGames)
}
@Get(Array("/sitemap.xml"))
def sitemap {
result.render(new Sitemap)(cfg.activeGames, cfg.buildDate)
}
} | rustaeja/rustaeja-project-8273 | web/src/main/scala/vggames/shared/SearchEngineConfiguration.scala | Scala | gpl-3.0 | 516 |
package eu.henkelmann.sbt
import _root_.sbt._
import java.io.{StringWriter, PrintWriter, File}
import java.net.InetAddress
import scala.collection.mutable.ListBuffer
import scala.xml.{Elem, Node, XML}
import sbt.testing.{Event => TEvent, Status => TStatus, Logger => TLogger, NestedTestSelector, TestSelector, AnnotatedFingerprint, SubclassFingerprint}
/*
The api for the test interface defining the results and events
can be found here:
https://github.com/harrah/test-interface
*/
/**
* A tests listener that outputs the results it receives in junit xml
* report format.
* @param outputDir path to the dir in which a folder with results is generated
*/
class JUnitXmlTestsListener(val outputDir:String) extends TestsListener
{
/**Current hostname so we know which machine executed the tests*/
val hostname = InetAddress.getLocalHost.getHostName
/**The dir in which we put all result files. Is equal to the given dir + "/test-reports"*/
val targetDir = new File(outputDir + "/test-reports/")
/**all system properties as XML*/
val properties =
<properties> {
val iter = System.getProperties.entrySet.iterator
val props:ListBuffer[Node] = new ListBuffer()
while (iter.hasNext) {
val next = iter.next
props += <property name={next.getKey.toString} value={next.getValue.toString} />
}
props
}
</properties>
/** Gathers data for one Test Suite. We map test groups to TestSuites.
* Each TestSuite gets its own output file.
*/
class TestSuite(val name:String) {
val events:ListBuffer[TEvent] = new ListBuffer()
val start = System.currentTimeMillis
var end = System.currentTimeMillis
/**Adds one test result to this suite.*/
def addEvent(e:TEvent) = events += e
/** Returns a triplet with the number of errors, failures and the
     * total number of tests in this suite.
*/
def count():(Int, Int, Int) = {
var errors, failures = 0
for (e <- events) {
e.status() match {
case TStatus.Error => errors +=1
case TStatus.Failure => failures +=1
case _ =>
}
}
(errors, failures, events.size)
}
    /** Stops the time measurement and emits the XML for
     * all tests collected so far.
*/
def stop():Elem = {
end = System.currentTimeMillis
val duration = end - start
val (errors, failures, tests) = count()
val result = <testsuite hostname={hostname} name={name}
tests={tests + ""} errors={errors + ""} failures={failures + ""}
time={(duration/1000.0).toString} >
{properties}
{
for (e <- events) yield {
val name = e.selector match {
case t:TestSelector => t.testName()
case n: NestedTestSelector => n.testName()
case _ => e.selector().toString
}
<testcase classname={name} name={name} time={e.duration().toString}> {
var trace:String = if (e.status() == TStatus.Error && e.throwable().isDefined) {
val stringWriter = new StringWriter()
val writer = new PrintWriter(stringWriter)
//e.error.printStackTrace(writer)
e.throwable().get().printStackTrace(writer)
writer.flush()
stringWriter.toString
}
else {
""
}
e.status() match {
case TStatus.Error if e.throwable().isDefined => <error message={e.throwable().get().getMessage} type={e.throwable().get().getClass.getName}>{trace}</error>
case TStatus.Error => <error message={"No Exception or message provided"} />
case TStatus.Failure if e.throwable().isDefined => <failure message={e.throwable().get().getMessage} type={e.throwable().get().getClass.getName}>{trace}</failure>
case TStatus.Failure => <failure message={"No Exception or message provided"} />
case TStatus.Skipped => <skipped />
case _ => {}
}
}
</testcase>
}
}
<system-out><![CDATA[]]></system-out>
<system-err><![CDATA[]]></system-err>
</testsuite>
result
}
}
/**The currently running test suite*/
var testSuite:TestSuite = null
/**Creates the output Dir*/
override def doInit() = {targetDir.mkdirs()}
/** Starts a new, initially empty Suite with the given name.
*/
override def startGroup(name: String) {testSuite = new TestSuite(name)}
    /** Adds all details for the given event to the current suite.
*/
override def testEvent(event: TestEvent): Unit = for (e <- event.detail) {testSuite.addEvent(e)}
/** called for each class or equivalent grouping
* We map one group to one Testsuite, so for each Group
* we create an XML like this:
* <?xml version="1.0" encoding="UTF-8" ?>
* <testsuite errors="x" failures="y" tests="z" hostname="example.com" name="eu.henkelmann.bla.SomeTest" time="0.23">
* <properties>
* <property name="os.name" value="Linux" />
* ...
* </properties>
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testFooWorks" time="0.0" >
* <error message="the foo did not work" type="java.lang.NullPointerException">... stack ...</error>
* </testcase>
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testBarThrowsException" time="0.0" />
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testBaz" time="0.0">
* <failure message="the baz was no bar" type="junit.framework.AssertionFailedError">...stack...</failure>
* </testcase>
* <system-out><![CDATA[]]></system-out>
* <system-err><![CDATA[]]></system-err>
* </testsuite>
*
* I don't know how to measure the time for each testcase, so it has to remain "0.0" for now :(
*/
override def endGroup(name: String, t: Throwable) = {
System.err.println("Throwable escaped the test run of '" + name + "': " + t)
t.printStackTrace(System.err)
}
/** Ends the current suite, wraps up the result and writes it to an XML file
* in the output folder that is named after the suite.
*/
override def endGroup(name: String, result: TestResult.Value) = {
XML.save (new File(targetDir, testSuite.name + ".xml").getAbsolutePath, testSuite.stop(), "UTF-8", true, null)
}
/**Does nothing, as we write each file after a suite is done.*/
override def doComplete(finalResult: TestResult.Value): Unit = {}
/**Returns None*/
override def contentLogger(test: TestDefinition): Option[ContentLogger] = None
}
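// Illustrative sbt usage (standard `testListeners` key; the output path is just an example):
//   testListeners += new eu.henkelmann.sbt.JUnitXmlTestsListener("target")
// The listener then writes one "<group name>.xml" report per test group under target/test-reports/.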
| chenkelmann/junit_xml_listener | src/main/scala/eu/henkelmann/sbt/JUnitXmlTestsListener.scala | Scala | mit | 7,729 |
package org.opencoin.core.util.crypto
import org.opencoin.core.token.PublicRSAKey
import org.opencoin.issuer.PrivateRSAKey
import java.math.BigInteger
import java.security.interfaces.RSAPrivateKey
import java.security.interfaces.RSAPublicKey
import java.security.KeyFactory
import java.security.MessageDigest
import java.security.Signature;
import java.security.spec.RSAPrivateKeySpec
import java.security.KeyPairGenerator
import java.security.KeyPair
import java.security.SecureRandom
import org.eintr.loglady.Logging
object generateKeyPair {
def apply(reference: BigInt, cipher_suite: String): (PublicRSAKey, PrivateRSAKey) = {
    //TODO cipher_suite is ignored for now. Instead RSA-2048 is always used.
val r = new scala.util.Random
val keyGen: KeyPairGenerator = KeyPairGenerator.getInstance("RSA")
val random: SecureRandom = SecureRandom.getInstance("SHA1PRNG", "SUN")
keyGen.initialize(2048, random)
val keyPair: KeyPair = keyGen.genKeyPair()
val privateKey: RSAPrivateKey = keyPair.getPrivate.asInstanceOf[RSAPrivateKey]
val publicKey: RSAPublicKey = keyPair.getPublic.asInstanceOf[RSAPublicKey]
//This may help: keyPair.getPrivate.asInstanceOf[RSAPrivateKey].getPrivateExponent
val key_modulus = BigInt(privateKey.getModulus)
val key_public_exponent = BigInt(publicKey.getPublicExponent)
//val key_private_exponent = Base64(privateKey.getPrivateExponent.toString)
val privKey = PrivateRSAKey(reference, cipher_suite, BigInt(privateKey.getModulus), new BigInt(privateKey.getPrivateExponent))
val pubKey = PublicRSAKey(key_modulus, key_public_exponent)
(pubKey, privKey)
}
}
object hash extends Logging {
/** create a SHA-256 hash from a String. Found in net.liftweb.util.SecurityHelpers */
//TODO A good library for more algorithms https://github.com/Nycto/Hasher
//TODO Add try/catch for MessageDigest operation
def apply(in: String, algorithm: String): Option[BigInt] = algorithm match {
case "SHA-256" => {
log.debug("Hashing: " + in)
val md = MessageDigest.getInstance("SHA-256")
md.update(in.getBytes("UTF-8"))
Some(BigInt(new BigInteger(1, md.digest))) // use this 1 to tell it is positive.
//Base64.encode(MessageDigest.getInstance("SHA-256").digest(in.getBytes("UTF-8")).mkString)
}
case _ => None
}
}
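// Illustrative usage: hash("some message", "SHA-256") yields Some(positive BigInt of the digest);
// any other algorithm name currently yields None.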
/*
def sign(token: Array[Byte], key: PrivateMintKey): Base64 = {
import java.math.BigInteger
import java.security.Signature
import java.security.spec.RSAPrivateKeySpec
import java.security.KeyFactory
//Convert public key into java.security.PublicKey format
//See this tutorial for details: http://www.java2s.com/Tutorial/Java/0490__Security/BasicRSAexample.htm
val spec = new RSAPrivateKeySpec(key.modulus, key.private_exponent)
val kf = KeyFactory.getInstance("RSA")
val privateKey = kf.generatePrivate(spec)
//Sign
val sig: Signature = Signature.getInstance("SHA256withRSA") //If it fails, try Bouncycastle provider
sig.initSign(privateKey)
sig.update(token)
new Base64(sig.sign)
}
*/
/**
* Cipher Suite is ignored for now.
**/
object sign {
def apply(token: String, privkey: PrivateRSAKey, cipherSuite: String): Option[BigInt] = {
//TODO Use different cipher suites and key lengths. ECDSA: https://github.com/baturinsky/Scala-Ecc#readme
//TODO try/catch for java calls
//require(cipherSuite=="RSA-2048")
val privateKeySpec: RSAPrivateKeySpec = new RSAPrivateKeySpec(privkey.modulus.bigInteger, privkey.private_exponent.bigInteger)
val key: RSAPrivateKey = KeyFactory.getInstance("RSA").generatePrivate(privateKeySpec).asInstanceOf[RSAPrivateKey]
val signature: Signature = Signature.getInstance("SHA256withRSA")
signature.initSign(key);
signature.update(token.getBytes());
Some(BigInt(new BigInteger(1, signature.sign))) // use this 1 to tell it is positive.
}
}
/*
def sign(message: String, secretKey: Key, cipherSuite: String): Base64 = {
// Install Bouncycastle?
// Use different cipher suites, key lengths
//use fixed keys
require(cipherSuite=="RSA-2048")
val keyGen: KeyPairGenerator = KeyPairGenerator.getInstance("RSA", "BC");
keyGen.initialize(2048, new SecureRandom());
val keyPair: KeyPair= keyGen.generateKeyPair();
val signature: Signature = Signature.getInstance("RSA", "BC");
signature.initSign(keyPair.getPrivate(), new SecureRandom());
signature.update(message.getBytes());
val sigBytes = signature.sign();
signature.initVerify(keyPair.getPublic());
signature.update(message.getBytes());
if(signature.verify(sigBytes)==false) null
else Base64(sigBytes)
}
}
//import java.security.Security;
import java.security.Key
import org.opencoin.core.util.Base64
def sign(message: String, secretKey: Key, cipherSuite: String): String = {
import java.security.interfaces.RSAPrivateKey
val privateKey: RSAPrivateKey = secretKey.publicKey
val s:String = Base64(rsa(message, privateKey.getModulus(), privateKey.getPrivateExponent()))
return "test"
}
import java.math.BigInteger
def rsa(message: String, modulus: BigInteger, pubExp: BigInteger): Seq[Byte] = {
import java.security.KeyFactory
import java.security.spec.RSAPublicKeySpec
import java.security.interfaces.RSAPublicKey
import javax.crypto.Cipher
val keyFactory: KeyFactory = KeyFactory.getInstance("RSA")
val pubKeySpec: RSAPublicKeySpec = new RSAPublicKeySpec(modulus, pubExp)
val key: RSAPublicKey = keyFactory.generatePublic(pubKeySpec).asInstanceOf[RSAPublicKey]
val cipher: Cipher = Cipher.getInstance("RSA/ECB/NoPadding")
cipher.init(Cipher.ENCRYPT_MODE, key)
cipher.doFinal(message.getBytes())
}
} */ | OpenCoin/opencoin-issuer-scala | src/main/scala/org/opencoin/core/util/crypto.scala | Scala | gpl-3.0 | 5,703 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.adaptive
import org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.expressions.{CreateNamedStruct, DynamicPruningExpression, ListQuery, Literal}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.TreePattern.{DYNAMIC_PRUNING_SUBQUERY, IN_SUBQUERY,
SCALAR_SUBQUERY}
import org.apache.spark.sql.execution
import org.apache.spark.sql.execution.{BaseSubqueryExec, InSubqueryExec, SparkPlan}
case class PlanAdaptiveSubqueries(
subqueryMap: Map[Long, BaseSubqueryExec]) extends Rule[SparkPlan] {
def apply(plan: SparkPlan): SparkPlan = {
plan.transformAllExpressionsWithPruning(
_.containsAnyPattern(SCALAR_SUBQUERY, IN_SUBQUERY, DYNAMIC_PRUNING_SUBQUERY)) {
case expressions.ScalarSubquery(_, _, exprId, _) =>
execution.ScalarSubquery(subqueryMap(exprId.id), exprId)
case expressions.InSubquery(values, ListQuery(_, _, exprId, _, _)) =>
val expr = if (values.length == 1) {
values.head
} else {
CreateNamedStruct(
values.zipWithIndex.flatMap { case (v, index) =>
Seq(Literal(s"col_$index"), v)
}
)
}
InSubqueryExec(expr, subqueryMap(exprId.id), exprId)
case expressions.DynamicPruningSubquery(value, _, _, _, _, exprId) =>
DynamicPruningExpression(InSubqueryExec(value, subqueryMap(exprId.id), exprId))
}
}
}
| wangmiao1981/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/PlanAdaptiveSubqueries.scala | Scala | apache-2.0 | 2,274 |
package slick.jdbc
import java.io.Closeable
import java.util.Properties
import java.util.concurrent.TimeUnit
import java.sql.{SQLException, DriverManager, Driver, Connection}
import javax.sql.DataSource
import com.typesafe.config.Config
import slick.util.{Logging, ClassLoaderUtil, BeanConfigurator}
import slick.util.ConfigExtensionMethods._
import slick.SlickException
/** A `JdbcDataSource` provides a way to create a `Connection` object for a database. It is
* similar to a `javax.sql.DataSource` but simpler. Unlike [[JdbcBackend.DatabaseDef]] it is not a
* part of the backend cake. This trait defines the SPI for 3rd-party connection pool support. */
trait JdbcDataSource extends Closeable {
/** Create a new Connection or get one from the pool */
def createConnection(): Connection
/** If this object represents a connection pool managed directly by Slick, close it.
* Otherwise no action is taken. */
def close(): Unit
}
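/** A minimal illustration of the SPI above (not used by Slick itself): opens a new, unpooled
  * connection per request via `DriverManager`. A sketch only; real implementations should also
  * handle credentials, pooling and resource cleanup. */
private[jdbc] class UnpooledUrlJdbcDataSource(url: String) extends JdbcDataSource {
  def createConnection(): Connection = DriverManager.getConnection(url)
  def close(): Unit = () // nothing is pooled, so nothing to release
}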
object JdbcDataSource extends Logging {
/** Create a JdbcDataSource from a `Config`. See [[JdbcBackend.DatabaseFactoryDef.forConfig]]
* for documentation of the supported configuration parameters. */
def forConfig(c: Config, driver: Driver, name: String, classLoader: ClassLoader): JdbcDataSource = {
def loadFactory(name: String): JdbcDataSourceFactory = {
val clazz = classLoader.loadClass(name)
clazz.getField("MODULE$").get(clazz).asInstanceOf[JdbcDataSourceFactory]
}
val pf: JdbcDataSourceFactory = c.getStringOr("connectionPool", "HikariCP") match {
case "disabled" => DataSourceJdbcDataSource
case "HikariCP" => loadFactory("slick.jdbc.hikaricp.HikariCPJdbcDataSource$")
case "slick.jdbc.HikariCPJdbcDataSource" =>
logger.warn("connectionPool class 'slick.jdbc.HikariCPJdbcDataSource$' has been renamed to 'slick.jdbc.hikaricp.HikariCPJdbcDataSource$'")
loadFactory("slick.jdbc.hikaricp.HikariCPJdbcDataSource$")
case name => loadFactory(name)
}
pf.forConfig(c, driver, name, classLoader)
}
}
/** Create a [[JdbcDataSource]] from a `Config` object and an optional JDBC `Driver`.
* This is used with the "connectionPool" configuration option in
* [[JdbcBackend.DatabaseFactoryDef.forConfig]]. */
trait JdbcDataSourceFactory {
def forConfig(c: Config, driver: Driver, name: String, classLoader: ClassLoader): JdbcDataSource
}
/** A JdbcDataSource for a `DataSource` */
class DataSourceJdbcDataSource(val ds: DataSource, val keepAliveConnection: Boolean,
val connectionPreparer: ConnectionPreparer = null) extends JdbcDataSource {
private[this] var openedKeepAliveConnection: Connection = null
def createConnection(): Connection = {
if(keepAliveConnection) {
synchronized {
if(openedKeepAliveConnection eq null)
openedKeepAliveConnection = ds.getConnection
}
}
val c = ds.getConnection
if(connectionPreparer ne null) connectionPreparer(c)
c
}
def close(): Unit = {
try if(keepAliveConnection && (openedKeepAliveConnection ne null)) openedKeepAliveConnection.close()
finally ds match {
case ds: Closeable => ds.close()
case _ =>
}
}
}
object DataSourceJdbcDataSource extends JdbcDataSourceFactory {
def forConfig(c: Config, driver: Driver, name: String, classLoader: ClassLoader): DataSourceJdbcDataSource = {
val ds = c.getStringOpt("dataSourceClass") match {
case Some(dsClass) =>
val propsO = c.getPropertiesOpt("properties")
try {
val ds = Class.forName(dsClass).newInstance.asInstanceOf[DataSource]
propsO.foreach(BeanConfigurator.configure(ds, _))
ds
} catch { case ex: Exception => throw new SlickException("Error configuring DataSource "+dsClass, ex) }
case None =>
val ds = new DriverDataSource
ds.classLoader = classLoader
BeanConfigurator.configure(ds, c.toProperties, Set("url", "user", "password", "properties", "driver", "driverClassName"))
ds
}
new DataSourceJdbcDataSource(ds, c.getBooleanOr("keepAliveConnection"), new ConnectionPreparer(c))
}
}
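// Illustrative HOCON consumed by the factories in this file (key names taken from the code above;
// the data source class and property values are examples only):
//   mydb {
//     connectionPool      = disabled
//     dataSourceClass     = "org.postgresql.ds.PGSimpleDataSource"
//     properties          = { serverName = "localhost", databaseName = "test" }
//     keepAliveConnection = true
//   }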
/** A JdbcDataSource which can load a JDBC `Driver` from a class name */
@deprecated("Use DataSourceJdbcDataSource with DriverDataSource instead", "3.1")
trait DriverBasedJdbcDataSource extends JdbcDataSource {
private[this] var registeredDriver: Driver = null
protected[this] def registerDriver(driverName: String, url: String): Unit = if(driverName ne null) {
val oldDriver = try DriverManager.getDriver(url) catch { case ex: SQLException if "08001" == ex.getSQLState => null }
if(oldDriver eq null) {
Class.forName(driverName)
registeredDriver = DriverManager.getDriver(url)
}
}
/** Deregister the JDBC driver if it was registered by this JdbcDataSource.
* Returns true if an attempt was made to deregister a driver. */
def deregisterDriver(): Boolean =
if(registeredDriver ne null) { DriverManager.deregisterDriver(registeredDriver); true }
else false
}
/** A JdbcDataSource for lookup via a `Driver` or the `DriverManager` */
@deprecated("Use DataSourceJdbcDataSource with DriverDataSource instead", "3.1")
class DriverJdbcDataSource(url: String, user: String, password: String, prop: Properties,
driverName: String = null, driver: Driver = null,
connectionPreparer: ConnectionPreparer = null,
keepAliveConnection: Boolean = false) extends DriverBasedJdbcDataSource {
private[this] var openedKeepAliveConnection: Connection = null
if(driver eq null) registerDriver(driverName, url)
val connectionProps = if(prop.ne(null) && user.eq(null) && password.eq(null)) prop else {
val p = new Properties(prop)
if(user ne null) p.setProperty("user", user)
if(password ne null) p.setProperty("password", password)
p
}
def createConnection(): Connection = {
if(keepAliveConnection) {
synchronized {
if(openedKeepAliveConnection eq null)
openedKeepAliveConnection = internalCreateConnection()
}
}
internalCreateConnection()
}
protected[this] def internalCreateConnection(): Connection = {
val conn = (if(driver eq null) DriverManager.getConnection(url, connectionProps)
else {
val conn = driver.connect(url, connectionProps)
if(conn eq null)
throw new SQLException("Driver " + driver + " does not know how to handle URL " + url, "08001")
conn
})
if(connectionPreparer ne null) connectionPreparer(conn)
conn
}
def close(): Unit = if(keepAliveConnection) {
if(openedKeepAliveConnection ne null) openedKeepAliveConnection.close()
}
}
@deprecated("Use DataSourceJdbcDataSource with DriverDataSource instead", "3.1")
object DriverJdbcDataSource extends JdbcDataSourceFactory {
def forConfig(c: Config, driver: Driver, name: String, classLoader: ClassLoader): DriverJdbcDataSource = {
val cp = new ConnectionPreparer(c)
new DriverJdbcDataSource(
c.getStringOr("url"),
c.getStringOr("user"),
c.getStringOr("password"),
c.getPropertiesOr("properties"),
c.getStringOr("driver", c.getStringOr("driverClassName")),
driver,
if(cp.isLive) cp else null,
c.getBooleanOr("keepAliveConnection"))
}
}
/** Set parameters on a new Connection. This is used by [[DataSourceJdbcDataSource]]. */
class ConnectionPreparer(c: Config) extends (Connection => Unit) {
val isolation = c.getStringOpt("isolation").map {
case "NONE" => Connection.TRANSACTION_NONE
case "READ_COMMITTED" => Connection.TRANSACTION_READ_COMMITTED
case "READ_UNCOMMITTED" => Connection.TRANSACTION_READ_UNCOMMITTED
case "REPEATABLE_READ" => Connection.TRANSACTION_REPEATABLE_READ
case "SERIALIZABLE" => Connection.TRANSACTION_SERIALIZABLE
case unknown => throw new SlickException(s"Unknown transaction isolation level [$unknown]")
}
val catalog = c.getStringOpt("catalog").orElse(c.getStringOpt("defaultCatalog"))
val readOnly = c.getBooleanOpt("readOnly")
val isLive = isolation.isDefined || catalog.isDefined || readOnly.isDefined
def apply(c: Connection): Unit = if(isLive) {
isolation.foreach(c.setTransactionIsolation)
readOnly.foreach(c.setReadOnly)
catalog.foreach(c.setCatalog)
}
}
| jkutner/slick | slick/src/main/scala/slick/jdbc/JdbcDataSource.scala | Scala | bsd-2-clause | 8,346 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import sbt.Def.ScopedKey
import sbt.Keys._
import sbt.Scope.Global
import sbt.SlashSyntax0._
import sbt.internal.util.MainAppender._
import sbt.internal.util.{ Terminal => ITerminal, _ }
import sbt.util.{ Level, Logger, LoggerContext }
import java.io.PrintWriter
import scala.annotation.nowarn
sealed abstract class LogManager {
def apply(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
writer: PrintWriter,
context: LoggerContext,
): ManagedLogger
@deprecated("Use alternate apply that provides a LoggerContext", "1.4.0")
def apply(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
writer: PrintWriter
): ManagedLogger = apply(data, state, task, writer, LoggerContext.globalContext)
def backgroundLog(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
context: LoggerContext
): ManagedLogger
@deprecated("Use alternate background log that provides a LoggerContext", "1.4.0")
final def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[_]): ManagedLogger =
backgroundLog(data, state, task, LoggerContext.globalContext)
}
/**
* A functional interface that allows us to preserve binary compatibility
* for LogManager.defaults with the old log4j variant.
*/
trait AppenderSupplier {
def apply(s: ScopedKey[_]): Seq[Appender]
}
object LogManager {
import java.util.concurrent.atomic.AtomicInteger
private val generateId: AtomicInteger = new AtomicInteger
// This is called by mkStreams
//
@nowarn
def construct(
data: Settings[Scope],
state: State
): (ScopedKey[_], PrintWriter) => ManagedLogger =
(task: ScopedKey[_], to: PrintWriter) => {
val context = state.get(Keys.loggerContext).getOrElse(LoggerContext.globalContext)
val manager: LogManager =
(logManager in task.scope).get(data) getOrElse defaultManager(state.globalLogging.console)
manager(data, state, task, to, context)
}
@nowarn
def constructBackgroundLog(
data: Settings[Scope],
state: State
): (ScopedKey[_]) => ManagedLogger =
(task: ScopedKey[_]) => {
val manager: LogManager =
(logManager in task.scope).get(data) getOrElse defaultManager(state.globalLogging.console)
val context = state.get(Keys.loggerContext).getOrElse(LoggerContext.globalContext)
manager.backgroundLog(data, state, task, context)
}
def defaultManager(console: ConsoleOut): LogManager =
withLoggers((_, _) => defaultScreen(console))
// This is called by Defaults.
def defaults(extra: AppenderSupplier, console: ConsoleOut): LogManager =
withLoggers(
(task, state) => defaultScreen(console, suppressedMessage(task, state)),
extra = extra
)
def withScreenLogger(mk: (ScopedKey[_], State) => Appender): LogManager =
withLoggers(screen = mk)
def withLoggers(
screen: (ScopedKey[_], State) => Appender = (_, s) => defaultScreen(s.globalLogging.console),
backed: PrintWriter => Appender = defaultBacked,
relay: Unit => Appender = defaultRelay,
extra: AppenderSupplier = _ => Nil
): LogManager = new DefaultLogManager(screen, backed, relay, extra)
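  // Illustrative build usage (imports elided; assumes the standard `logManager` setting key):
  // a build could install a manager that only customises the screen appender, e.g.
  //   Global / logManager := LogManager.withScreenLogger((_, s) => defaultScreen(s.globalLogging.console))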
private class DefaultLogManager(
screen: (ScopedKey[_], State) => Appender,
backed: PrintWriter => Appender,
relay: Unit => Appender,
extra: AppenderSupplier
) extends LogManager {
def apply(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
to: PrintWriter,
context: LoggerContext,
): ManagedLogger =
defaultLogger(
data,
state,
task,
screen(task, state),
backed(to),
relay(()),
extra(task).toList,
context,
)
def backgroundLog(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
context: LoggerContext
): ManagedLogger = {
val console = screen(task, state)
LogManager.backgroundLog(data, state, task, console, relay(()), context)
}
}
// to change from global being the default to overriding, switch the order of state.get and data.get
def getOr[T](
key: AttributeKey[T],
data: Settings[Scope],
scope: Scope,
state: State,
default: T
): T =
data.get(scope, key) orElse state.get(key) getOrElse default
@deprecated("Use defaultLogger that provides a LoggerContext", "1.4.0")
def defaultLogger(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
console: Appender,
backed: Appender,
relay: Appender,
extra: List[Appender]
): ManagedLogger =
defaultLogger(data, state, task, console, backed, relay, extra, LoggerContext.globalContext)
// This is the main function that is used to generate the logger for tasks.
def defaultLogger(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
console: Appender,
backed: Appender,
relay: Appender,
extra: List[Appender],
context: LoggerContext,
): ManagedLogger = {
val execOpt = state.currentCommand
val loggerName: String = s"${task.key.label}-${generateId.incrementAndGet}"
val channelName: Option[String] = execOpt flatMap (_.source map (_.channelName))
val execId: Option[String] = execOpt flatMap { _.execId }
val log = context.logger(loggerName, channelName, execId)
val scope = task.scope
val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info)
val backingLevel = getOr(persistLogLevel.key, data, scope, state, Level.Debug)
val screenTrace = getOr(traceLevel.key, data, scope, state, defaultTraceLevel(state))
val backingTrace = getOr(persistTraceLevel.key, data, scope, state, Int.MaxValue)
val extraBacked = state.globalLogging.backed :: relay :: Nil
val consoleOpt = consoleLocally(state, console)
val config = MainAppender.MainAppenderConfig(
consoleOpt,
backed,
extraBacked ::: extra,
screenLevel,
backingLevel,
screenTrace,
backingTrace
)
multiLogger(log, config, context)
}
// Return None if the exec is not from console origin.
def consoleLocally(state: State, console: Appender): Option[Appender] =
state.currentCommand match {
case Some(x: Exec) =>
x.source match {
// TODO: Fix this stringliness
case Some(x: CommandSource) if x.channelName == ConsoleChannel.defaultName =>
Option(console)
case _ => Option(console)
}
case _ => Option(console)
}
def defaultTraceLevel(state: State): Int =
if (state.interactive) -1 else Int.MaxValue
def suppressedMessage(
key: ScopedKey[_],
state: State
): SuppressedTraceContext => Option[String] = {
val display = Project.showContextKey(state)
def commandBase = "last " + display.show(unwrapStreamsKey(key))
def command(useFormat: Boolean) =
if (useFormat) s"${scala.Console.MAGENTA}$commandBase${scala.Console.RESET}"
else s"'$commandBase'"
{ context =>
Some(
s"stack trace is suppressed; run ${command(context.useFormat)} for the full output"
)
}
}
def unwrapStreamsKey(key: ScopedKey[_]): ScopedKey[_] = key.scope.task match {
case Select(task) => ScopedKey(key.scope.copy(task = Zero), task)
case _ => key // should never get here
}
def backgroundLog(
data: Settings[Scope],
state: State,
task: ScopedKey[_],
console: Appender,
/* TODO: backed: Appender,*/
relay: Appender,
context: LoggerContext,
): ManagedLogger = {
val scope = task.scope
val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info)
val backingLevel = getOr(persistLogLevel.key, data, scope, state, Level.Debug)
val screenTrace = getOr(traceLevel.key, data, scope, state, 0)
val execOpt = state.currentCommand
val loggerName: String = s"bg-${task.key.label}-${generateId.incrementAndGet}"
val channelName: Option[String] = execOpt flatMap (_.source map (_.channelName))
// val execId: Option[String] = execOpt flatMap { _.execId }
val log = context.logger(loggerName, channelName, None)
context.clearAppenders(loggerName)
val consoleOpt = consoleLocally(state, console) map {
case a: Appender =>
a.setTrace(screenTrace)
a
case a => a
}
consoleOpt.foreach(a => context.addAppender(loggerName, a -> screenLevel))
context.addAppender(loggerName, relay -> backingLevel)
log
}
// TODO: Fix this
// if global logging levels are not explicitly set, set them from project settings
// private[sbt] def setGlobalLogLevels(s: State, data: Settings[Scope]): State =
// if (hasExplicitGlobalLogLevels(s))
// s
// else {
// val logging = s.globalLogging
// def get[T](key: SettingKey[T]) = key in GlobalScope get data
// def transfer(l: AbstractLogger, traceKey: SettingKey[Int], levelKey: SettingKey[Level.Value]): Unit = {
// get(traceKey).foreach(l.setTrace)
// get(levelKey).foreach(l.setLevel)
// }
// logging.full match {
// case a: AbstractLogger => transfer(a, traceLevel, logLevel)
// case _ => ()
// }
// transfer(logging.backed, persistTraceLevel, persistLogLevel)
// s
// }
def setGlobalLogLevel(s: State, level: Level.Value): State = {
val s1 = s.put(BasicKeys.explicitGlobalLogLevels, true).put(Keys.logLevel.key, level)
val gl = s1.globalLogging
LoggerContext.globalContext.clearAppenders(gl.full.name)
LoggerContext.globalContext.addAppender(gl.full.name, gl.backed -> level)
s1
}
// This is the default implementation for the relay appender
val defaultRelay: Unit => ConsoleAppender = _ => defaultRelayImpl
private[this] lazy val defaultRelayImpl: ConsoleAppender = new RelayAppender("Relay0")
private[sbt] def settingsLogger(state: State): Def.Setting[_] =
// strict to avoid retaining a reference to `state`
Global / sLog :== globalWrapper(state)
// construct a Logger that delegates to the global logger, but only holds a weak reference
// this is an approximation to the ideal that would invalidate the delegate after loading completes
private[this] def globalWrapper(s: State): Logger =
new Logger {
private[this] val ref = new java.lang.ref.WeakReference(s.globalLogging.full)
private[this] def slog: Logger =
Option(ref.get) getOrElse sys.error("Settings logger used after project was loaded.")
override val ansiCodesSupported = ITerminal.isAnsiSupported
override def trace(t: => Throwable) = slog.trace(t)
override def success(message: => String) = slog.success(message)
override def log(level: Level.Value, message: => String) = slog.log(level, message)
}
}
| sbt/sbt | main/src/main/scala/sbt/internal/LogManager.scala | Scala | apache-2.0 | 11,079 |
package com.atanana.json
import javax.inject.Inject
import com.atanana.FsHandler
import spray.json.DefaultJsonProtocol._
import spray.json._
import scala.util.Try
class JsonConfig @Inject()(fsHandler: FsHandler) {
import JsonConfig.FILE_NAME
private implicit val configFormat: RootJsonFormat[Config] = jsonFormat8(Config)
def read: Try[Config] = {
fsHandler.readFile(FILE_NAME)
.flatMap(contents => Try {
contents.parseJson.convertTo[Config]
})
}
}
object JsonConfig {
val FILE_NAME = "config.json"
def apply(fsHandler: FsHandler): JsonConfig = new JsonConfig(fsHandler)
}
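// Illustrative config.json matching the case class below (values are placeholders):
//   { "token": "...", "chat": 1, "team": 2, "city": 3, "port": 8080,
//     "cityName": "City", "countryName": "Country", "ignoredVenues": [] }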
case class Config(
token: String,
chat: Int,
team: Int,
city: Int,
port: Int,
cityName: String,
countryName: String,
ignoredVenues: List[String]
) | atanana/rating-bot | src/main/scala/com/atanana/json/JsonConfig.scala | Scala | mit | 934 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.step.startend
import java.util.HashMap
import javax.servlet.FilterChain
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.base.StepContext
import com.rackspace.com.papi.components.checker.step.results._
import scala.util.matching.Regex
//
// Like MethodFail, but fails only if the current method is not
//  matched against the method regex
//
class MethodFailMatch(id : String, label : String, val method : Regex, priority : Long) extends MethodFail(id, label, priority) {
private val allowHeaders = new HashMap[String, String](1)
  allowHeaders.put("Allow", method.toString.replaceAll("\\|", ", "))
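  // e.g. a method regex of "GET|POST" is advertised as the Allow header value "GET, POST"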
override def check(req : CheckerServletRequest,
resp : CheckerServletResponse,
chain : FilterChain,
context : StepContext) : Option[Result] = {
var result : Option[Result] = super.check(req, resp, chain, context)
if (result != None) {
req.getMethod() match {
case method() => result = None
case _ => result = Some(new MethodFailResult (result.get.message+". The Method does not match the pattern: '"+method+"'",
context,
id,
priority,
allowHeaders.clone.asInstanceOf[java.util.Map[String,String]])) // Augment our parents result with match info
}
}
result
}
}
| tylerroyal/api-checker | core/src/main/scala/com/rackspace/com/papi/components/checker/step/startend/MethodFailMatch.scala | Scala | apache-2.0 | 2,228 |
package com.sageaxcess.sampletask.plainscala
import java.io.{File, PrintWriter}
import com.sageaxcess.sampletask.tokenizer.Tokenizer
/**
* CSV files processing app,
* example input file: <pre>
* HeaderA,HeaderB,HeaderC
* valueA1,valueB1,valueC1
* valueA1,valueB2,value with spaces
* </pre>
* example output file: <pre>
* valueA1:2
* valueB1:1
* valueC1:1
* valueB2:1
* value with spaces:1
* </pre>
* call via sbt:
* sbt "plainscala/run input.csv output"
*/
object TokensCount extends App {
private val OUTPUT_SEPARATOR = ':'
private val DEFAULT_INPUT = "input.csv"
private val DEFAULT_OUTPUT = "output"
if (args.nonEmpty && Array("-h", "h", "--help", "help").contains(args(0))) {
printHelpAndExit()
}
val inputFileName = if (args.nonEmpty) {
args(0)
} else DEFAULT_INPUT
val outputFileName = if (args.length > 1) {
args(1)
} else DEFAULT_OUTPUT
val lines = io.Source.fromFile(inputFileName).getLines()
if (!lines.hasNext) {
println("empty input file")
System.exit(1)
}
val header = lines.next()
val tokens = getTokens(lines)
println(s"Processing CSV values with header: $header")
val counts = getCounts(tokens)
printOutputSample()
println(s"Full output goes to $outputFileName")
writeOutputToFile()
def printHelpAndExit(): Unit = {
println(
"""Usage:
|sbt "plainscala/run --help"
|shows this message
|
|sbt "plainscala/run input.csv"
|provide input file name, default is "input.csv"
|
|sbt "plainscala/run input.csv output"
|provide both input and output file names, defaults are "input.csv", "output"
""".stripMargin)
System.exit(0)
}
def getTokens(lines: Iterator[String]): Iterator[String] = {
lines flatMap Tokenizer.tokenize()
}
def getCounts(tokens: Iterator[String]): Map[String, Long] = {
tokens.foldLeft(Map.empty[String, Long]) {
(count, word) => count + (word -> (count.getOrElse(word, 0L) + 1))
}
}
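  // Editor's illustrative note (not part of the original app): the fold above counts
  // occurrences incrementally, e.g.
  //   getCounts(Iterator("a", "b", "a")) == Map("a" -> 2L, "b" -> 1L)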
def printOutputSample(): Unit = {
counts.take(10).foreach {
case (token, count) => println(s"$token$OUTPUT_SEPARATOR$count")
}
println("...")
}
def writeOutputToFile(): Unit = {
val writer = new PrintWriter(new File(outputFileName))
counts.foreach {
case (token, count) =>
writer.print(token)
writer.print(OUTPUT_SEPARATOR)
writer.println(count)
}
writer.close()
}
}
| utgarda/SageAxcessSampleTask | plainscala/src/main/scala/com/sageaxcess/sampletask/plainscala/TokensCount.scala | Scala | mit | 2,467 |
/**
* Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.pantsbuild.zinc.analysis
import java.nio.file.Path
import java.io.{File, IOException}
import java.util.Optional
import scala.compat.java8.OptionConverters._
import sbt.internal.inc.{
Analysis,
CompanionsStore,
Locate
}
import xsbti.api.Companions
import xsbti.compile.{
AnalysisContents,
AnalysisStore,
CompileAnalysis,
DefinesClass,
FileAnalysisStore,
MiniSetup,
PerClasspathEntryLookup
}
import org.pantsbuild.zinc.cache.Cache.Implicits
import org.pantsbuild.zinc.cache.{Cache, FileFPrint}
import org.pantsbuild.zinc.util.Util
/**
* A facade around the analysis cache to:
* 1) map between classpath entries and cache locations
* 2) use analysis for `definesClass` when it is available
*
* SBT uses the `definesClass` and `getAnalysis` methods in order to load the APIs for upstream
* classes. For a classpath containing multiple entries, sbt will call `definesClass` sequentially
* on classpath entries until it finds a classpath entry defining a particular class. When it finds
* the appropriate classpath entry, it will use `getAnalysis` to fetch the API for that class.
*/
class AnalysisMap private[AnalysisMap] (
// a map of classpath entries to cache file fingerprints, excluding the current compile destination
analysisLocations: Map[File, FileFPrint],
// a Map of File bases to destinations to re-relativize them to
rebases: Map[File, File]
) {
private val analysisMappers = PortableAnalysisMappers.create(rebases)
def getPCELookup = new PerClasspathEntryLookup {
/**
* Gets analysis for a classpath entry (if it exists) by translating its path to a potential
* cache location and then checking the cache.
*/
def analysis(classpathEntry: File): Optional[CompileAnalysis] =
analysisLocations.get(classpathEntry).flatMap(cacheLookup).asJava
/**
* An implementation of definesClass that will use analysis for an input directory to determine
* whether it defines a particular class.
*
* TODO: This optimization is unnecessary for jars on the classpath, which are already indexed.
* Can remove after the sbt jar output patch lands.
*/
def definesClass(classpathEntry: File): DefinesClass = {
// If we have analysis with a valid Compilation, use the classnames it refers to.
val analysisDefinesClass =
for (
abstractAnalysis <- getAnalysis(classpathEntry);
analysis = abstractAnalysis.asInstanceOf[Analysis];
compilation <- analysis.compilations.allCompilations.headOption;
singleOutput <- compilation.getOutput.getSingleOutput.asScala;
classesDir = singleOutput.toPath
) yield {
// strongly hold the classNames, and transform them to ensure that they are unlinked from
// the remainder of the analysis
val classNames = analysis.relations.srcProd.reverseMap.keys.toList.toSet.map(
(f: File) => filePathToClassName(classesDir, f))
new ClassNamesDefinesClass(classNames)
}
analysisDefinesClass.getOrElse {
// no analysis: return a function that will scan instead
Locate.definesClass(classpathEntry)
}
}
private class ClassNamesDefinesClass(classes: Set[String]) extends DefinesClass {
override def apply(className: String): Boolean = classes(className)
}
private def filePathToClassName(classesDir: Path, file: File): String =
classesDir.relativize(file.toPath).toString.replace(".class", "").replaceAll("/", ".")
/**
* Gets analysis for a classpath entry (if it exists) by translating its path to a potential
* cache location and then checking the cache.
*/
def getAnalysis(classpathEntry: File): Option[CompileAnalysis] =
analysisLocations.get(classpathEntry).flatMap(cacheLookup)
}
def cachedStore(cacheFile: File): AnalysisStore =
AnalysisStore.getThreadSafeStore(
new AnalysisStore {
val fileStore = mkFileAnalysisStore(cacheFile)
def set(analysis: AnalysisContents) {
fileStore.set(analysis)
FileFPrint.fprint(cacheFile).foreach { fprint =>
AnalysisMap.analysisCache.put(fprint, Some(analysis))
}
}
def get(): Optional[AnalysisContents] = {
val res =
FileFPrint.fprint(cacheFile) flatMap { fprint =>
AnalysisMap.analysisCache.getOrElseUpdate(fprint) {
fileStore.get().asScala
}
}
res.asJava
}
}
)
private def cacheLookup(cacheFPrint: FileFPrint): Option[CompileAnalysis] =
AnalysisMap.analysisCache.getOrElseUpdate(cacheFPrint) {
// re-fingerprint the file on miss, to ensure that analysis hasn't changed since we started
if (!FileFPrint.fprint(cacheFPrint.file).exists(_ == cacheFPrint)) {
throw new IOException(s"Analysis at $cacheFPrint has changed since startup!")
}
mkFileAnalysisStore(cacheFPrint.file).get().asScala
}.map(_.getAnalysis)
private def mkFileAnalysisStore(file: File): AnalysisStore =
FileAnalysisStore.getDefault(file, analysisMappers)
}
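// Editor's hedged sketch of typical wiring (not part of the original file; assumes an
// AnalysisOptions value is in scope):
//   val analysisMap = AnalysisMap.create(options)
//   val lookup = analysisMap.getPCELookup // handed to the incremental compiler setup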
object AnalysisMap {
// Because the analysis cache uses Weak references, bounding its size is generally
// counterproductive.
private val analysisCacheLimit =
Util.intProperty(
"zinc.analysis.cache.limit",
Int.MaxValue
)
/**
* Static cache for compile analyses. Values must be Options because in get() we don't yet
* know if, on a cache miss, the underlying file will yield a valid Analysis.
*/
private val analysisCache =
Cache[FileFPrint, Option[AnalysisContents]](analysisCacheLimit)
def create(options: AnalysisOptions): AnalysisMap =
new AnalysisMap(
// create fingerprints for all inputs at startup
options.cacheMap.flatMap {
case (classpathEntry, cacheFile) => FileFPrint.fprint(cacheFile).map(classpathEntry -> _)
},
options.rebaseMap
)
}
| tdyas/pants | src/scala/org/pantsbuild/zinc/analysis/AnalysisMap.scala | Scala | apache-2.0 | 6,152 |
package it.seralf.googlegroups
import java.net.URL
case class GGTopic(topic_id: String, url: URL, messages: Stream[GGMessage] = Stream.empty) {
override def toString() = s"""TOPIC [$url] ${messages.toList.size} messages"""
} | seralf/ggroups_exporter | src/main/scala/it/seralf/googlegroups/GGTopic.scala | Scala | apache-2.0 | 228 |
package controllers
import java.util.UUID
import actors.execution.ExecutionActorFactory
import actors.{ActorsMap, PLMActor}
import akka.actor.ActorRef
import com.google.inject.Inject
import com.google.inject.name.Named
import com.mohiva.play.silhouette.api.{Environment, LogoutEvent, Silhouette}
import com.mohiva.play.silhouette.impl.authenticators.JWTAuthenticator
import com.mohiva.play.silhouette.impl.providers.SocialProviderRegistry
import json.LectureToJson
import models.User
import play.api.Logger
import play.api.Play.current
import play.api.i18n.{Lang, MessagesApi}
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json.{JsValue, Json}
import play.api.mvc._
import plm.core.lang.ProgrammingLanguages
import plm.core.model.lesson.{Exercises, Lessons}
import utils._
import scala.concurrent.Future
/**
* The basic application controller.
*
* @param env The Silhouette environment.
*/
class ApplicationController @Inject() (
@Named("pushActor") pushActor: ActorRef,
val messagesApi: MessagesApi,
implicit val env: Environment[User, JWTAuthenticator],
lessons: Lessons,
exercises: Exercises,
programmingLanguages: ProgrammingLanguages,
lectureToJson: LectureToJson,
socialProviderRegistry: SocialProviderRegistry,
executionActorFactory: ExecutionActorFactory)
extends Silhouette[User, JWTAuthenticator] {
def socket(optToken: Option[String]) = WebSocket.tryAcceptWithActor[JsValue, String] { request =>
var token = optToken.getOrElse("")
var userAgent: String = request.headers.get("User-Agent").getOrElse("")
var requestWithToken: RequestHeader = env.authenticatorService.embed(token, request)
var actorUUID: String = UUID.randomUUID.toString
implicit val req = Request(requestWithToken, AnyContentAsEmpty)
SecuredRequestHandler { securedRequest =>
Future.successful(HandlerResult(Ok, Some(securedRequest.identity)))
}.map {
case HandlerResult(r, Some(user)) =>
Right(
PLMActor.propsWithUser(
pushActor,
executionActorFactory.create(user.preferredLang),
userAgent,
actorUUID,
lessons,
exercises,
programmingLanguages,
lectureToJson,
user) _)
case HandlerResult(r, None) =>
val preferredLang: Lang = LangUtils.getPreferredLang(request)
val lastProgLang: String = CookieUtils.getCookieValue(request, "progLang")
var newUser: Boolean = false
var gitID: String = CookieUtils.getCookieValue(request, "gitID")
if(gitID.isEmpty) {
newUser = true
gitID = UUID.randomUUID.toString
}
Right(
PLMActor.props(
pushActor,
executionActorFactory.create(Some(preferredLang)),
userAgent,
actorUUID,
gitID,
newUser,
Some(preferredLang),
Some(lastProgLang),
trackUser = Some(false),
lessons,
exercises,
programmingLanguages,
lectureToJson) _)
}
}
/**
* Returns the user.
*
* @return The result to display.
*/
def user(actorUUID: String) = SecuredAction.async { implicit request =>
ActorsMap.get(actorUUID) match {
case Some(actor) =>
actor ! Json.obj(
"cmd" -> "signIn",
"user" -> request.identity
)
Future.successful(Ok)
case _ =>
Logger.error("Actor not found... Weird isn't it?")
Future.successful(Unauthorized)
}
}
/**
* Manages the sign out action.
*/
def signOut = SecuredAction.async { implicit request =>
env.eventBus.publish(LogoutEvent(request.identity, request, request2Messages))
env.authenticatorService.discard(request.authenticator, Ok)
}
}
| MatthieuNICOLAS/webPLM | app/controllers/ApplicationController.scala | Scala | agpl-3.0 | 3,890 |
/*
* =========================================================================================
* Copyright © 2017 Workday, Inc.
* Copyright © 2013-2017 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package com.workday.prometheus.akka
import scala.concurrent.duration.Duration
import akka.actor.Actor
class RouterMetricsTestActor extends Actor {
import RouterMetricsTestActor._
override def receive = {
case Discard ⇒
case Fail ⇒ throw new ArithmeticException("Division by zero.")
case Ping ⇒ sender ! Pong
case RouterTrackTimings(sendTimestamp, sleep) ⇒ {
val dequeueTimestamp = System.nanoTime()
sleep.map(s ⇒ Thread.sleep(s.toMillis))
val afterReceiveTimestamp = System.nanoTime()
sender ! RouterTrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
}
}
}
object RouterMetricsTestActor {
case object Ping
case object Pong
case object Fail
case object Discard
case class RouterTrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
case class RouterTrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
}
}
| Workday/prometheus-akka | src/test/scala/com/workday/prometheus/akka/RouterMetricsTestActor.scala | Scala | apache-2.0 | 1,978 |
package athena.client
import athena.Requests.BoundStatement
import athena.data.{PreparedStatementDef, CvResult, CValue, Writes}
import scala.annotation.implicitNotFound
import play.api.libs.iteratee.{Iteratee, Enumeratee, Enumerator}
import scala.concurrent.{ExecutionContext, Future}
object QueryInterpolation {
@implicitNotFound("No implicit ParamGenerator was found for type ${T}. The easiest way to fix this is to ensure that a Writes value for ${T} is implicitly available.")
trait ParamGenerator[T] {
def toParam: CValue
}
object ParamGenerator {
import scala.language.implicitConversions
implicit def fromWrites[T](t: T)(implicit w: Writes[T]): ParamGenerator[T] = new ParamGenerator[T] {
override def toParam: CValue = w.writes(t)
}
}
implicit class QueryContext(val s: StringContext) extends AnyVal {
def cql(paramGenerators: ParamGenerator[_]*) = {
val params = paramGenerators.map(_.toParam)
val query = s.parts.mkString("?")
new InterpolatedQuery(query, params)
}
def cqlu(paramGenerators: ParamGenerator[_]*) = {
val params = paramGenerators.map(_.toParam)
val query = s.parts.mkString("?")
StatementRunner.unitRunner(query, params)
}
}
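  // Editor's hedged usage sketch (the User type, its RowReader, and the bound values are
  // illustrative assumptions only):
  //   val users  = cql"SELECT * FROM users WHERE id = $userId".as[User]
  //   val update = cqlu"UPDATE users SET name = $name WHERE id = $userId"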
implicit class SimpleQuery(val q: String) extends AnyVal {
def asQuery[A, B](implicit rw: RowWriter[A], rr: RowReader[B]): A => StatementRunner[Enumerator[CvResult[B]]] = {
a => StatementRunner.streamRunner(q, rw.write(a))(rr)
}
def asQueryNoArgs[B](implicit rr: RowReader[B]): StatementRunner[Enumerator[CvResult[B]]] = {
//don't use a prepared statement because there are no query parameters - doesn't make a lot of sense
StatementRunner.streamRunner(q, Seq(), usePreparedStatement = false)(rr)
}
def asUpdate[A](implicit rw: RowWriter[A]): A => StatementRunner[Future[Unit]] = {
a => StatementRunner.unitRunner(q, rw.write(a))
}
}
class InterpolatedQuery private[QueryInterpolation] (query: String, args: Seq[CValue]) {
def as[A](implicit rr: RowReader[A]): StatementRunner[Enumerator[CvResult[A]]] = {
StatementRunner.streamRunner(query, args)(rr)
}
}
implicit class PreparedStatementOps(val stmt: PreparedStatementDef) extends AnyVal {
//TODO: Add bits that allow for changing query params like consistency, etc.
// Probably need a more complex abstraction than this
def binder[A](implicit rw: RowWriter[A]): A => BoundStatement = { a =>
BoundStatement(stmt, rw.write(a))
}
}
}
| vast-engineering/athena | src/main/scala/athena/client/QueryInterpolation.scala | Scala | apache-2.0 | 2,540 |
package unof.cv.tools.paramsmenu
import unof.cv.tools.CallbackCenter
import unof.cv.tools.CvSetting
import unof.cv.base.charLib.CMImage
import unof.cv.base.charLib.CMShape
import unof.cv.base.charLib.CMPart
import unof.cv.base.charLib.CMCategory
import unof.cv.base.charLib.CMLayer
trait LayerTypeInsensitvePannel {
def refresh(callbacks: CallbackCenter, settings: CvSetting) = {
callbacks.selection.forSelected(callbacks.charMaker,
ifLayerSelected(callbacks, settings, _),
ifPartSelected(callbacks, settings, _),
ifCategorySelected(callbacks, settings, _)
)
}
def ifLayerSelected(callbacks: CallbackCenter, settings: CvSetting, image: CMLayer)
def ifPartSelected(callbacks: CallbackCenter, settings: CvSetting, part: CMPart)
def ifCategorySelected(callbacks: CallbackCenter, settings: CvSetting, cat: CMCategory)
} | Hgjj/CharViewer | js/src/main/scala/unof/cv/tools/paramsmenu/LayerTypeInsensitvePannel.scala | Scala | bsd-3-clause | 861 |
package es.upm.fi.oeg.siq.tools
import org.slf4j.LoggerFactory
object URLTools {
val logger = LoggerFactory.getLogger(URLTools.getClass())
def stripSpecial(input: String) = {
val resultStr = new StringBuilder
input.toCharArray.foreach { ch =>
if (!isSpecial(ch))
resultStr.append(ch)
}
resultStr.toString
}
def encode(input: String) = {
val resultStr = new StringBuilder
input match {
case null => resultStr.append("NULL")
case text => text.toCharArray.foreach { ch =>
if (isUnsafe(ch)) {
resultStr.append('%')
resultStr.append(toHex(ch / 16))
resultStr.append(toHex(ch % 16))
} else resultStr.append(ch)
}
}
resultStr.toString
}
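  // Editor's note: for example, encode("a b") yields "a%20b", since ' ' is listed as unsafe below.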
def encodeAll(input: String) = {
val resultStr = new StringBuilder
input.toCharArray.foreach { ch =>
if (isSpecial(ch)) {
resultStr.append('%')
resultStr.append(toHex(ch / 16))
resultStr.append(toHex(ch % 16))
} else resultStr.append(ch)
}
resultStr.toString
}
private def toHex(ch: Int): Char =
(if (ch < 10) '0' + ch else 'A' + ch - 10).toChar
private def isUnsafe(ch: Char): Boolean =
if (ch > 128 || ch < 0) true
else " %$&+,=?@<>%".indexOf(ch) >= 0
private def isSpecial(ch: Char): Boolean = {
if (ch > 128 || ch < 0) return true
return " \"/%$:&+,;=?@<>#%{}\\".indexOf(ch) >= 0;
}
def isAbsIRI(iri: String) = {
logger.debug("\n\n\n\nIRI: " + iri)
iri.startsWith("http://") || iri.startsWith("https://")
}
} | jpcik/morph | morph-core/src/main/scala/es/upm/fi/oeg/siq/tools/URLTools.scala | Scala | apache-2.0 | 1,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types._
class ColumnStatsSuite extends SparkFunSuite {
testColumnStats(classOf[BooleanColumnStats], BOOLEAN, Array(true, false, 0))
testColumnStats(classOf[ByteColumnStats], BYTE, Array(Byte.MaxValue, Byte.MinValue, 0))
testColumnStats(classOf[ShortColumnStats], SHORT, Array(Short.MaxValue, Short.MinValue, 0))
testColumnStats(classOf[IntColumnStats], INT, Array(Int.MaxValue, Int.MinValue, 0))
testColumnStats(classOf[LongColumnStats], LONG, Array(Long.MaxValue, Long.MinValue, 0))
testColumnStats(classOf[FloatColumnStats], FLOAT, Array(Float.MaxValue, Float.MinValue, 0))
testColumnStats(classOf[DoubleColumnStats], DOUBLE, Array(Double.MaxValue, Double.MinValue, 0))
testColumnStats(classOf[StringColumnStats], STRING, Array(null, null, 0))
testDecimalColumnStats(Array(null, null, 0))
testIntervalColumnStats(Array(null, null, 0))
def testColumnStats[T <: AtomicType, U <: ColumnStats](
columnStatsClass: Class[U],
columnType: NativeColumnType[T],
initialStatistics: Array[Any]): Unit = {
val columnStatsName = columnStatsClass.getSimpleName
test(s"$columnStatsName: empty") {
val columnStats = columnStatsClass.getConstructor().newInstance()
columnStats.collectedStatistics.zip(initialStatistics).foreach {
case (actual, expected) => assert(actual === expected)
}
}
test(s"$columnStatsName: non-empty") {
import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
val columnStats = columnStatsClass.getConstructor().newInstance()
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
rows.foreach(columnStats.gatherStats(_, 0))
val values = rows.take(10).map(_.get(0, columnType.dataType).asInstanceOf[T#InternalType])
val ordering = columnType.dataType.ordering.asInstanceOf[Ordering[T#InternalType]]
val stats = columnStats.collectedStatistics
assertResult(values.min(ordering), "Wrong lower bound")(stats(0))
assertResult(values.max(ordering), "Wrong upper bound")(stats(1))
assertResult(10, "Wrong null count")(stats(2))
assertResult(20, "Wrong row count")(stats(3))
assertResult(stats(4), "Wrong size in bytes") {
rows.map { row =>
if (row.isNullAt(0)) 4 else columnType.actualSize(row, 0)
}.sum
}
}
}
def testDecimalColumnStats[T <: AtomicType, U <: ColumnStats](
initialStatistics: Array[Any]): Unit = {
val columnStatsName = classOf[DecimalColumnStats].getSimpleName
val columnType = COMPACT_DECIMAL(15, 10)
test(s"$columnStatsName: empty") {
val columnStats = new DecimalColumnStats(15, 10)
columnStats.collectedStatistics.zip(initialStatistics).foreach {
case (actual, expected) => assert(actual === expected)
}
}
test(s"$columnStatsName: non-empty") {
import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
val columnStats = new DecimalColumnStats(15, 10)
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
rows.foreach(columnStats.gatherStats(_, 0))
val values = rows.take(10).map(_.get(0, columnType.dataType).asInstanceOf[T#InternalType])
val ordering = columnType.dataType.ordering.asInstanceOf[Ordering[T#InternalType]]
val stats = columnStats.collectedStatistics
assertResult(values.min(ordering), "Wrong lower bound")(stats(0))
assertResult(values.max(ordering), "Wrong upper bound")(stats(1))
assertResult(10, "Wrong null count")(stats(2))
assertResult(20, "Wrong row count")(stats(3))
assertResult(stats(4), "Wrong size in bytes") {
rows.map { row =>
if (row.isNullAt(0)) 4 else columnType.actualSize(row, 0)
}.sum
}
}
}
def testIntervalColumnStats[T <: AtomicType, U <: ColumnStats](
initialStatistics: Array[Any]): Unit = {
val columnStatsName = classOf[IntervalColumnStats].getSimpleName
val columnType = CALENDAR_INTERVAL
test(s"$columnStatsName: empty") {
val columnStats = new IntervalColumnStats
columnStats.collectedStatistics.zip(initialStatistics).foreach {
case (actual, expected) => assert(actual === expected)
}
}
test(s"$columnStatsName: non-empty") {
import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
val columnStats = new IntervalColumnStats
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
rows.foreach(columnStats.gatherStats(_, 0))
val stats = columnStats.collectedStatistics
assertResult(10, "Wrong null count")(stats(2))
assertResult(20, "Wrong row count")(stats(3))
assertResult(stats(4), "Wrong size in bytes") {
rows.map { row =>
if (row.isNullAt(0)) 4 else columnType.actualSize(row, 0)
}.sum
}
}
}
}
| hvanhovell/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala | Scala | apache-2.0 | 5,820 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.core
import org.junit.Test
import org.junit.runner.RunWith
import org.scalatest.Suite
import org.scalatest.junit.JUnitRunner
/**
* QName test case.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class QNameTest extends Suite {
@Test def testUnprefixedName(): Unit = {
val qname = UnprefixedName("Bookstore")
assertResult("Bookstore") {
qname.localPart
}
assertResult(None) {
qname.prefixOption
}
val qname2 = QName("Bookstore")
assertResult("Bookstore") {
qname2.localPart
}
assertResult(None) {
qname2.prefixOption
}
assertResult(qname) {
qname2
}
assertResult(qname.hashCode) {
qname2.hashCode
}
val qname3 = QName(None, "Bookstore")
assertResult("Bookstore") {
qname3.localPart
}
assertResult(None) {
qname3.prefixOption
}
assertResult(qname) {
qname3
}
assertResult(qname.hashCode) {
qname3.hashCode
}
val qname4 = QName("Bookstore")
assertResult("Bookstore") {
qname4.localPart
}
assertResult(None) {
qname4.prefixOption
}
assertResult(qname) {
qname4
}
assertResult(qname.hashCode) {
qname4.hashCode
}
val qname5 = QName("Bookstore")
assertResult("Bookstore") {
qname5.localPart
}
assertResult(None) {
qname5.prefixOption
}
assertResult(qname) {
qname5
}
assertResult(qname.hashCode) {
qname5.hashCode
}
intercept[Exception] {
UnprefixedName(null)
}
intercept[Exception] {
UnprefixedName("").validated
}
intercept[Exception] {
UnprefixedName("a:b").validated
}
intercept[Exception] {
QName.parse("").validated
}
intercept[Exception] {
QName.parse(":").validated
}
val qnOption = qname match {
case qn @ QName(None, localPart) => Some(qn)
case _ => None
}
assertResult(Some(qname)) {
qnOption
}
}
@Test def testPrefixedName(): Unit = {
val qname = PrefixedName("books", "Bookstore")
assertResult("Bookstore") {
qname.localPart
}
assertResult(Some("books")) {
qname.prefixOption
}
assertResult("books") {
qname.prefix
}
val qname2: PrefixedName = PrefixedName("books", "Bookstore")
assertResult("Bookstore") {
qname2.localPart
}
assertResult(Some("books")) {
qname2.prefixOption
}
assertResult("books") {
qname2.prefix
}
assertResult(qname) {
qname2
}
assertResult(qname.hashCode) {
qname2.hashCode
}
val qname3: PrefixedName = QName(Some("books"), "Bookstore").asInstanceOf[PrefixedName]
assertResult("Bookstore") {
qname3.localPart
}
assertResult(Some("books")) {
qname3.prefixOption
}
assertResult("books") {
qname3.prefix
}
assertResult(qname) {
qname3
}
assertResult(qname.hashCode) {
qname3.hashCode
}
val qname4 = QName("books:Bookstore").asInstanceOf[PrefixedName]
assertResult("Bookstore") {
qname4.localPart
}
assertResult(Some("books")) {
qname4.prefixOption
}
assertResult("books") {
qname4.prefix
}
assertResult(qname) {
qname4
}
assertResult(qname.hashCode) {
qname4.hashCode
}
val qname5 = QName("books:Bookstore").asInstanceOf[PrefixedName]
assertResult("Bookstore") {
qname5.localPart
}
assertResult(Some("books")) {
qname5.prefixOption
}
assertResult("books") {
qname5.prefix
}
assertResult(qname) {
qname5
}
assertResult(qname.hashCode) {
qname5.hashCode
}
intercept[Exception] {
PrefixedName(null, null)
}
intercept[Exception] {
PrefixedName(null, "b")
}
intercept[Exception] {
PrefixedName("a", null)
}
intercept[Exception] {
PrefixedName("", "").validated
}
intercept[Exception] {
PrefixedName("", "b").validated
}
intercept[Exception] {
PrefixedName("a", "").validated
}
intercept[Exception] {
PrefixedName("a:c", "b").validated
}
intercept[Exception] {
PrefixedName("a", "b:c").validated
}
intercept[Exception] {
QName.parse("a:").validated
}
intercept[Exception] {
QName.parse(":b").validated
}
val qnOption = qname match {
case qn @ QName(Some(prefix), localPart) => Some(qn)
case _ => None
}
assertResult(Some(qname)) {
qnOption
}
}
}
| EBPI/yaidom | src/test/scala/nl/ebpi/yaidom/core/QNameTest.scala | Scala | apache-2.0 | 5,297 |
package reactivemongo.api
import scala.concurrent.{ ExecutionContext, Future }
/**
* Cursor wrapper, to help to define custom cursor classes.
* @see CursorProducer
*/
trait WrappedCursor[T] extends Cursor[T] with WrappedCursorCompat[T] {
/** The underlying cursor */
protected def wrappee: Cursor[T]
final def foldBulks[A](z: => A, maxDocs: Int = -1)(suc: (A, Iterator[T]) => Cursor.State[A], err: Cursor.ErrorHandler[A])(implicit ec: ExecutionContext): Future[A] = wrappee.foldBulks(z, maxDocs)(suc, err)
final def foldBulksM[A](z: => A, maxDocs: Int = -1)(suc: (A, Iterator[T]) => Future[Cursor.State[A]], err: Cursor.ErrorHandler[A])(implicit ec: ExecutionContext): Future[A] = wrappee.foldBulksM(z, maxDocs)(suc, err)
final def foldWhile[A](z: => A, maxDocs: Int = -1)(suc: (A, T) => Cursor.State[A], err: Cursor.ErrorHandler[A])(implicit ec: ExecutionContext): Future[A] = wrappee.foldWhile(z, maxDocs)(suc, err)
final def foldWhileM[A](z: => A, maxDocs: Int = -1)(suc: (A, T) => Future[Cursor.State[A]], err: Cursor.ErrorHandler[A])(implicit ec: ExecutionContext): Future[A] = wrappee.foldWhileM(z, maxDocs)(suc, err)
final def head(implicit ec: ExecutionContext): Future[T] = wrappee.head
final def headOption(implicit ec: ExecutionContext): Future[Option[T]] = wrappee.headOption
}
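// Editor's hedged sketch (not part of the original file): a delegating cursor can be defined by
// supplying the wrapped instance, roughly as
//   class LoggingCursor[T](val wrappee: Cursor[T]) extends WrappedCursor[T]
// (any abstract members coming from WrappedCursorCompat aside); the fold/head operations above
// then delegate to `wrappee`.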
| ReactiveMongo/ReactiveMongo | driver/src/main/scala/api/WrappedCursor.scala | Scala | apache-2.0 | 1,317 |
/**
* Copyright 2011 Mogoterra, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mogotest.localtunnel
import scala.util.parsing.json._
class ServerResponse(json: String)
{
private val decoded = JSON.parseFull(json).get.asInstanceOf[Map[String, Any]]
val host = decoded.getOrElse("host", null).asInstanceOf[String]
val throughPort = decoded.getOrElse("through_port", -1).asInstanceOf[Double].toInt
val banner = decoded.getOrElse("banner", null).asInstanceOf[String]
val user = decoded.getOrElse("user", null).asInstanceOf[String]
val errorMessage = decoded.getOrElse("error", null).asInstanceOf[String]
val sshHost = if (host == null) null else host.split(':').head
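  // Editor's illustrative note (hypothetical payload): for a response such as
  //   {"host": "abc.localtunnel.com:8080", "through_port": 9000}
  // `sshHost` becomes "abc.localtunnel.com" and `throughPort` becomes 9000.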
} | mogotest/localtunnel-jvm-client | src/main/scala/com/mogotest/localtunnel/ServerResponse.scala | Scala | apache-2.0 | 1,216 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen
import java.lang.reflect.ParameterizedType
import java.lang.{Iterable => JIterable}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.codegen.Indenter.toISC
import org.apache.flink.table.codegen.CodeGenUtils.newName
import org.apache.flink.table.functions.AggregateFunction
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils.{getUserDefinedMethod, signatureToString}
import org.apache.flink.table.runtime.aggregate.{GeneratedAggregations, SingleElementIterable}
/**
* A code generator for generating [[GeneratedAggregations]].
*
* @param config configuration that determines runtime behavior
* @param nullableInput input(s) can be null.
* @param input type information about the input of the Function
*/
class AggregationCodeGenerator(
config: TableConfig,
nullableInput: Boolean,
input: TypeInformation[_ <: Any])
extends CodeGenerator(config, nullableInput, input) {
/**
* Generates a [[org.apache.flink.table.runtime.aggregate.GeneratedAggregations]] that can be
* passed to a Java compiler.
*
* @param name Class name of the function.
* Does not need to be unique but has to be a valid Java class identifier.
* @param generator The code generator instance
* @param physicalInputTypes Physical input row types
* @param aggregates All aggregate functions
* @param aggFields Indexes of the input fields for all aggregate functions
* @param aggMapping The mapping of aggregates to output fields
* @param partialResults A flag defining whether final or partial results (accumulators) are set
* to the output row.
* @param fwdMapping The mapping of input fields to output fields
* @param mergeMapping An optional mapping to specify the accumulators to merge. If not set, we
* assume that both rows have the accumulators at the same position.
* @param constantFlags An optional parameter to define where to set constant boolean flags in
* the output row.
* @param outputArity The number of fields in the output row.
* @param needRetract a flag to indicate if the aggregate needs the retract method
* @param needMerge a flag to indicate if the aggregate needs the merge method
* @param needReset a flag to indicate if the aggregate needs the resetAccumulator method
*
* @return A GeneratedAggregationsFunction
*/
def generateAggregations(
name: String,
generator: CodeGenerator,
physicalInputTypes: Seq[TypeInformation[_]],
aggregates: Array[AggregateFunction[_ <: Any, _ <: Any]],
aggFields: Array[Array[Int]],
aggMapping: Array[Int],
partialResults: Boolean,
fwdMapping: Array[Int],
mergeMapping: Option[Array[Int]],
constantFlags: Option[Array[(Int, Boolean)]],
outputArity: Int,
needRetract: Boolean,
needMerge: Boolean,
needReset: Boolean)
: GeneratedAggregationsFunction = {
// get unique function name
val funcName = newName(name)
// register UDAGGs
val aggs = aggregates.map(a => generator.addReusableFunction(a))
// get java types of accumulators
val accTypeClasses = aggregates.map { a =>
a.getClass.getMethod("createAccumulator").getReturnType
}
val accTypes = accTypeClasses.map(_.getCanonicalName)
// get java classes of input fields
val javaClasses = physicalInputTypes.map(t => t.getTypeClass)
// get parameter lists for aggregation functions
val parameters = aggFields.map { inFields =>
val fields = for (f <- inFields) yield
s"(${javaClasses(f).getCanonicalName}) input.getField($f)"
fields.mkString(", ")
}
val methodSignaturesList = aggFields.map {
inFields => for (f <- inFields) yield javaClasses(f)
}
// check and validate the needed methods
aggregates.zipWithIndex.map {
case (a, i) => {
getUserDefinedMethod(a, "accumulate", Array(accTypeClasses(i)) ++ methodSignaturesList(i))
.getOrElse(
throw new CodeGenException(
s"No matching accumulate method found for AggregateFunction " +
s"'${a.getClass.getCanonicalName}'" +
s"with parameters '${signatureToString(methodSignaturesList(i))}'.")
)
if (needRetract) {
getUserDefinedMethod(a, "retract", Array(accTypeClasses(i)) ++ methodSignaturesList(i))
.getOrElse(
throw new CodeGenException(
s"No matching retract method found for AggregateFunction " +
s"'${a.getClass.getCanonicalName}'" +
s"with parameters '${signatureToString(methodSignaturesList(i))}'.")
)
}
if (needMerge) {
val methods =
getUserDefinedMethod(a, "merge", Array(accTypeClasses(i), classOf[JIterable[Any]]))
.getOrElse(
throw new CodeGenException(
s"No matching merge method found for AggregateFunction " +
s"${a.getClass.getCanonicalName}'.")
)
var iterableTypeClass = methods.getGenericParameterTypes.apply(1)
.asInstanceOf[ParameterizedType].getActualTypeArguments.apply(0)
// further extract iterableTypeClass if the accumulator has generic type
iterableTypeClass match {
case impl: ParameterizedType => iterableTypeClass = impl.getRawType
case _ =>
}
if (iterableTypeClass != accTypeClasses(i)) {
throw new CodeGenException(
s"merge method in AggregateFunction ${a.getClass.getCanonicalName} does not have " +
s"the correct Iterable type. Actually: ${iterableTypeClass.toString}. " +
s"Expected: ${accTypeClasses(i).toString}")
}
}
if (needReset) {
getUserDefinedMethod(a, "resetAccumulator", Array(accTypeClasses(i)))
.getOrElse(
throw new CodeGenException(
s"No matching resetAccumulator method found for " +
s"aggregate ${a.getClass.getCanonicalName}'.")
)
}
}
}
def genSetAggregationResults: String = {
val sig: String =
j"""
| public final void setAggregationResults(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row output)""".stripMargin
val setAggs: String = {
for (i <- aggs.indices) yield
if (partialResults) {
j"""
| output.setField(
| ${aggMapping(i)},
| (${accTypes(i)}) accs.getField($i));""".stripMargin
} else {
j"""
| org.apache.flink.table.functions.AggregateFunction baseClass$i =
| (org.apache.flink.table.functions.AggregateFunction) ${aggs(i)};
|
| output.setField(
| ${aggMapping(i)},
| baseClass$i.getValue((${accTypes(i)}) accs.getField($i)));""".stripMargin
}
    }.mkString("\n")
j"""
|$sig {
|$setAggs
| }""".stripMargin
}
def genAccumulate: String = {
val sig: String =
j"""
| public final void accumulate(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input)""".stripMargin
val accumulate: String = {
for (i <- aggs.indices) yield
j"""
| ${aggs(i)}.accumulate(
| ((${accTypes(i)}) accs.getField($i)),
| ${parameters(i)});""".stripMargin
    }.mkString("\n")
j"""$sig {
|$accumulate
| }""".stripMargin
}
def genRetract: String = {
val sig: String =
j"""
| public final void retract(
| org.apache.flink.types.Row accs,
| org.apache.flink.types.Row input)""".stripMargin
val retract: String = {
for (i <- aggs.indices) yield
j"""
| ${aggs(i)}.retract(
| ((${accTypes(i)}) accs.getField($i)),
| ${parameters(i)});""".stripMargin
    }.mkString("\n")
if (needRetract) {
j"""
|$sig {
|$retract
| }""".stripMargin
} else {
j"""
|$sig {
| }""".stripMargin
}
}
def genCreateAccumulators: String = {
val sig: String =
j"""
| public final org.apache.flink.types.Row createAccumulators()
| """.stripMargin
val init: String =
j"""
| org.apache.flink.types.Row accs =
| new org.apache.flink.types.Row(${aggs.length});"""
.stripMargin
val create: String = {
for (i <- aggs.indices) yield
j"""
| accs.setField(
| $i,
| ${aggs(i)}.createAccumulator());"""
.stripMargin
    }.mkString("\n")
val ret: String =
j"""
| return accs;"""
.stripMargin
j"""$sig {
|$init
|$create
|$ret
| }""".stripMargin
}
def genSetForwardedFields: String = {
val sig: String =
j"""
| public final void setForwardedFields(
| org.apache.flink.types.Row input,
| org.apache.flink.types.Row output)
| """.stripMargin
val forward: String = {
for (i <- fwdMapping.indices if fwdMapping(i) >= 0) yield
{
j"""
| output.setField(
| $i,
| input.getField(${fwdMapping(i)}));"""
.stripMargin
}
    }.mkString("\n")
j"""$sig {
|$forward
| }""".stripMargin
}
def genSetConstantFlags: String = {
val sig: String =
j"""
| public final void setConstantFlags(org.apache.flink.types.Row output)
| """.stripMargin
val setFlags: String = if (constantFlags.isDefined) {
{
for (cf <- constantFlags.get) yield {
j"""
| output.setField(${cf._1}, ${if (cf._2) "true" else "false"});"""
.stripMargin
}
      }.mkString("\n")
} else {
""
}
j"""$sig {
|$setFlags
| }""".stripMargin
}
def genCreateOutputRow: String = {
j"""
| public final org.apache.flink.types.Row createOutputRow() {
| return new org.apache.flink.types.Row($outputArity);
| }""".stripMargin
}
def genMergeAccumulatorsPair: String = {
val mapping = mergeMapping.getOrElse(aggs.indices.toArray)
val sig: String =
j"""
| public final org.apache.flink.types.Row mergeAccumulatorsPair(
| org.apache.flink.types.Row a,
| org.apache.flink.types.Row b)
""".stripMargin
val merge: String = {
for (i <- aggs.indices) yield
j"""
| ${accTypes(i)} aAcc$i = (${accTypes(i)}) a.getField($i);
| ${accTypes(i)} bAcc$i = (${accTypes(i)}) b.getField(${mapping(i)});
| accIt$i.setElement(bAcc$i);
| ${aggs(i)}.merge(aAcc$i, accIt$i);
| a.setField($i, aAcc$i);
""".stripMargin
    }.mkString("\n")
val ret: String =
j"""
| return a;
""".stripMargin
if (needMerge) {
j"""
|$sig {
|$merge
|$ret
| }""".stripMargin
} else {
j"""
|$sig {
|$ret
| }""".stripMargin
}
}
def genMergeList: String = {
{
val singleIterableClass = classOf[SingleElementIterable[_]].getCanonicalName
for (i <- accTypes.indices) yield
j"""
| private final $singleIterableClass<${accTypes(i)}> accIt$i =
| new $singleIterableClass<${accTypes(i)}>();
""".stripMargin
    }.mkString("\n")
}
def genResetAccumulator: String = {
val sig: String =
j"""
| public final void resetAccumulator(
| org.apache.flink.types.Row accs)""".stripMargin
val reset: String = {
for (i <- aggs.indices) yield
j"""
| ${aggs(i)}.resetAccumulator(
| ((${accTypes(i)}) accs.getField($i)));""".stripMargin
    }.mkString("\n")
if (needReset) {
j"""$sig {
|$reset
| }""".stripMargin
} else {
j"""$sig {
| }""".stripMargin
}
}
val generatedAggregationsClass = classOf[GeneratedAggregations].getCanonicalName
var funcCode =
j"""
|public final class $funcName extends $generatedAggregationsClass {
|
| ${reuseMemberCode()}
| $genMergeList
| public $funcName() throws Exception {
| ${reuseInitCode()}
| }
| ${reuseConstructorCode(funcName)}
|
""".stripMargin
    funcCode += genSetAggregationResults + "\n"
    funcCode += genAccumulate + "\n"
    funcCode += genRetract + "\n"
    funcCode += genCreateAccumulators + "\n"
    funcCode += genSetForwardedFields + "\n"
    funcCode += genSetConstantFlags + "\n"
    funcCode += genCreateOutputRow + "\n"
    funcCode += genMergeAccumulatorsPair + "\n"
    funcCode += genResetAccumulator + "\n"
funcCode += "}"
GeneratedAggregationsFunction(funcName, funcCode)
}
}
| mtunique/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/AggregationCodeGenerator.scala | Scala | apache-2.0 | 14,664 |
import org.elkdanger.testing.MongoMocks
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.mock.MockitoSugar
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
abstract class SpecBase extends FunSpec with MongoMocks with ShouldMatchers with MockitoSugar {
/*
   * Awaits a Future's completion, with a timeout of 5 seconds
*/
protected def await[T](result: Future[T]) = Await.result(result, 5 seconds)
}
| elkdanger/mongo-mocks | test/SpecBase.scala | Scala | mit | 485 |
package epic.features
import epic.framework.Feature
class HackyLexicalSplitFeaturizer[W]() extends SplitSpanFeaturizer[W] {
val label = s"RelativeDifference"
private val emptyArray = Array.empty[Feature]
private val theSplitNeedingAnchoring = new SplitSpanFeatureAnchoring[W] with Serializable {
def featuresForSplit(begin: Int, split: Int, end: Int): Array[Feature] = {
emptyArray
// Array(DistanceFeature(db.binnedDistance((end-split) - (split-begin)), label))
}
def featuresForSpan(begin: Int, end: Int): Array[Feature] = emptyArray
}
def anchor(w: IndexedSeq[W]): SplitSpanFeatureAnchoring[W] = {
theSplitNeedingAnchoring
}
} | langkilde/epic | src/main/scala/epic/features/HackyLexicalSplitFeaturizer.scala | Scala | apache-2.0 | 681 |
/*
* Copyright 2014 – 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.redis.util.pool
trait PooledObject[A] {
def create(): A
def activate(obj: A): A
def passivate(obj: A): A
def destroy(obj: A): Unit
def validate(obj: A): Boolean
}
object PooledObject {
@inline def apply[A](implicit A: PooledObject[A]): PooledObject[A] = A
abstract class PooledObjectBase[A] extends PooledObject[A] {
def activate(obj: A): A = obj
def passivate(obj: A): A = obj
def destroy(obj: A): Unit = ()
def validate(obj: A): Boolean = true
}
def of[A](make: ⇒ A): PooledObject[A] = new PooledObjectBase[A] {
def create(): A = make
}
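  // Editor's hedged example: the simplest instances only describe construction, e.g.
  //   implicit val builders: PooledObject[StringBuilder] = PooledObject.of(new StringBuilder)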
}
| knutwalker/rx-redis | modules/core/src/main/scala/rx/redis/util/pool/PooledObject.scala | Scala | apache-2.0 | 1,200 |
/**
* (c) Copyright 2012 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.shell.ddl
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConversions._
import java.util.ArrayList
import org.kiji.schema.avro.FamilyDesc
import org.kiji.schema.avro.LocalityGroupDesc
/**
* Holds info describing a group-type column family.
*/
class GroupFamilyInfo(val name: String, val desc: Option[String], val cols: List[ColumnClause]) {
/**
* Add this new group family definition to a locality group.
* Assumes that this family name does not exist elsewhere in the layout
* (verified in AlterTableAddGroupFamilyCommand.validateArguments()).
*/
def addToLocalityGroup(locGroup: LocalityGroupDesc): Unit = {
val groupFamily = new FamilyDesc
groupFamily.setName(name)
groupFamily.setEnabled(true)
desc match {
case Some(descStr) => { groupFamily.setDescription(descStr) }
case None => { groupFamily.setDescription("") }
}
var avroCols = cols.map(c => c.toAvroColumnDesc())
groupFamily.setColumns(ListBuffer(avroCols: _*))
groupFamily.setAliases(new ArrayList[String])
locGroup.getFamilies().add(groupFamily)
}
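  // Editor's illustrative sketch (names are hypothetical): given a LocalityGroupDesc `locGroup`,
  //   new GroupFamilyInfo("info", Some("user info"), cols).addToLocalityGroup(locGroup)
  // appends a new enabled group-type family named "info" to locGroup's family list.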
}
| alexandre-normand/kiji-schema-shell | src/main/scala/org/kiji/schema/shell/ddl/GroupFamilyInfo.scala | Scala | apache-2.0 | 1,851 |
package org.scaladebugger.api.profiles.java.info
import com.sun.jdi._
import org.scaladebugger.api.profiles.traits.info.{InfoProducer, LocationInfo, MethodInfo, ReferenceTypeInfo}
import org.scaladebugger.api.virtualmachines.ScalaVirtualMachine
import org.scaladebugger.test.helpers.ParallelMockFunSpec
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers, ParallelTestExecution}
class JavaLocationInfoSpec extends ParallelMockFunSpec
{
private val mockNewMethodProfile = mockFunction[Method, MethodInfo]
private val mockNewReferenceTypeProfile = mockFunction[ReferenceType, ReferenceTypeInfo]
private val mockScalaVirtualMachine = mock[ScalaVirtualMachine]
private val mockInfoProducerProfile = mock[InfoProducer]
private val mockLocation = mock[Location]
private val javaLocationInfoProfile = new JavaLocationInfo(
scalaVirtualMachine = mockScalaVirtualMachine,
infoProducer = mockInfoProducerProfile,
_location = mockLocation
) {
override protected def newReferenceTypeProfile(
referenceType: ReferenceType
): ReferenceTypeInfo = mockNewReferenceTypeProfile(referenceType)
override protected def newMethodProfile(
method: Method
): MethodInfo = mockNewMethodProfile(method)
}
describe("JavaLocationInfo") {
describe("#toJavaInfo") {
it("should return a new instance of the Java profile representation") {
val expected = mock[LocationInfo]
// Get Java version of info producer
(mockInfoProducerProfile.toJavaInfo _).expects()
.returning(mockInfoProducerProfile).once()
// Create new info profile using Java version of info producer
(mockInfoProducerProfile.newLocationInfo _)
.expects(mockScalaVirtualMachine, mockLocation)
.returning(expected).once()
val actual = javaLocationInfoProfile.toJavaInfo
actual should be (expected)
}
}
describe("#isJavaInfo") {
it("should return true") {
val expected = true
val actual = javaLocationInfoProfile.isJavaInfo
actual should be (expected)
}
}
describe("#toJdiInstance") {
it("should return the JDI instance this profile instance represents") {
val expected = mockLocation
val actual = javaLocationInfoProfile.toJdiInstance
actual should be (expected)
}
}
describe("#declaringType") {
it("should return a wrapper profile for the declaring type of the location") {
val expected = mock[ReferenceTypeInfo]
val referenceType = mock[ReferenceType]
(mockLocation.declaringType _).expects()
.returning(referenceType).once()
mockNewReferenceTypeProfile.expects(referenceType)
.returning(expected).once()
val actual = javaLocationInfoProfile.declaringType
actual should be (expected)
}
}
describe("#method") {
it("should return a wrapper profile for the method of the location") {
val expected = mock[MethodInfo]
val referenceType = mock[Method]
(mockLocation.method _).expects()
.returning(referenceType).once()
mockNewMethodProfile.expects(referenceType)
.returning(expected).once()
val actual = javaLocationInfoProfile.method
actual should be (expected)
}
}
describe("#codeIndex") {
it("should return the code index of the underlying location") {
val expected = 999
(mockLocation.codeIndex _).expects().returning(expected).once()
val actual = javaLocationInfoProfile.codeIndex
actual should be (expected)
}
}
describe("#lineNumber") {
it("should return the line number of the underlying location") {
val expected = 999
(mockLocation.lineNumber: Function0[Int]).expects()
.returning(expected).once()
val actual = javaLocationInfoProfile.lineNumber
actual should be (expected)
}
}
describe("#sourceName") {
it("should return the source name of the underlying location") {
val expected = "file.scala"
(mockLocation.sourceName: Function0[String]).expects()
.returning(expected).once()
val actual = javaLocationInfoProfile.sourceName
actual should be (expected)
}
}
describe("#sourcePath") {
it("should return the source path of the underlying location") {
val expected = "path/to/file.scala"
(mockLocation.sourcePath: Function0[String]).expects()
.returning(expected).once()
val actual = javaLocationInfoProfile.sourcePath
actual should be (expected)
}
}
}
}
| ensime/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/profiles/java/info/JavaLocationInfoSpec.scala | Scala | apache-2.0 | 4,730 |
package org.mybatis.scala
import org.mybatis.scala.config.Configuration
import org.mybatis.scala.session.{Session, SessionManager}
import org.mybatis.scala.infrastructure.{BlogRepository, UserRepository}
/**
 * This trait provides support for using databases in test cases.
*/
trait DatabaseSupport {
/**
   * Executes the callback function provided as an argument within a read-only database transaction.
* @param block the callback function to be executed within a database transaction.
*/
def withReadOnly(db: SessionManager)(block: Session => Unit): Unit = {
db.readOnly { implicit session =>
DatabaseSchema.prepare
block(session)
}
}
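  // Editor's hedged usage sketch (the repository call is illustrative only):
  //   withReadOnly(db) { implicit session =>
  //     UserRepository.count() shouldBe 0
  //   }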
}
| tempbottle/scala-1 | mybatis-scala-core/src/test/scala/org/mybatis/scala/DatabaseSupport.scala | Scala | apache-2.0 | 693 |
package com.sksamuel.elastic4s.get
import com.sksamuel.elastic4s.testkit.ElasticSugar
import org.scalatest.{FlatSpec, Matchers}
class GetDslTest extends FlatSpec with Matchers with ElasticSugar {
"a get by id request" should "accept tuple for from" in {
val req = get id 123 from "places" -> "cities"
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
}
it should "accept two parameters" in {
val req = get id 123 from("places", "cities")
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
}
it should "parse slash indextype" in {
val req = get id 123 from "places/cities"
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
}
it should "accept one field" in {
val req = get id 123 from "places/cities" fields "name"
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
req.build.storedFields() shouldBe Array("name")
}
it should "accept multiple fields" in {
val req = get id 123 from "places/cities" fields("name", "title", "content")
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
req.build.storedFields() shouldBe Array("name", "title", "content")
}
it should "disable fetchSource" in {
val req = get(123).from("places/cities").fetchSourceContext(false)
assert(req.build.index() === "places")
assert(req.build.`type`() === "cities")
req.build.fetchSourceContext().fetchSource shouldBe false
}
it should "should support routing" in {
val req = get(123).from("places" / "cities").routing("aroundwego")
assert(req.build.routing() === "aroundwego")
}
it should "should support parent" in {
val req = get id 123 from "places/cities" parent "whosyour"
assert(req.build.parent() === "whosyour")
}
}
| ulric260/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/get/GetDslTest.scala | Scala | apache-2.0 | 1,879 |
package ru.imho.dddmt
import ru.imho.dddmt.config.typesafe.UniverseBuilder
import ru.imho.dddmt.std.StandardParameterTypes._
import ru.imho.dddmt.core._
import Base._
import org.slf4j.LoggerFactory
import ru.imho.dddmt.core.impl.DGraph
/**
* Driver program
*
* @author VVybornov
*
*/
object Main {
val logger = LoggerFactory.getLogger("Main")
def main(args: Array[String]): Unit = {
var build = true
var dot = false
var maxCommandsPerIteration = 16
val period = YMDHRangePeriod(
YMDHParameterType.fromString(args(0)),
YMDHParameterType.fromString(args(1)))
val u = UniverseBuilder.buildUniverse(None)
DGraph.validateNSDepMap(u.nsDeps)
val dg = new DGraph(period, u.nsDeps, new DefaultWeavingPolicy(u.nsDeps))
if(dot) {
println("------------------------------------------------")
println(dg.dot)
println("------------------------------------------------")
}
def iteration: Boolean = {
val outdated = dg.newSession.leftUpTraverser.collect
logger.debug("Outdated: {}", outdated)
val commands = outdated.take(maxCommandsPerIteration).map { case (n, a) =>
try {
u.jobFactories(n.nodeSpace.id).newJob(n.nodeSpace, n.parameterValue)
} catch {
case t: NoSuchElementException => sys.error(s"No build command defined for ns `${n.nodeSpace.id}`")
}
}
true
}
}
} | IMHOVi/dddmt | dddmt-engine/src/main/scala/ru/imho/dddmt/Main.scala | Scala | apache-2.0 | 1,444 |
package com.ubirch.chain.core.actor.consumer
import com.ubirch.chain.config.ChainConfig
import com.ubirch.chain.core.actor.util.ActorTools
import com.ubirch.chain.core.actor.{ActorNames, BigchainActor}
import com.ubirch.chain.model.rest.DeviceMsgHashIn
import com.ubirch.util.json.{Json4sUtil, MyJsonProtocol}
import org.json4s.JValue
import akka.actor.{ActorLogging, Props}
import akka.camel.{CamelMessage, Consumer}
/**
* author: cvandrei
* since: 2017-06-28
*/
class DeviceDataHashInConsumer extends Consumer
with ActorTools
with ActorLogging
with MyJsonProtocol {
private implicit val _system = context.system
private val bigchainActor = context.actorOf(BigchainActor.props(), ActorNames.BIGCHAIN)
override def endpointUri: String = sqsEndpointConsumer(ChainConfig.awsSqsQueueDeviceDataHashIn)
override def receive: Receive = {
case msg: CamelMessage =>
log.debug(s"received ${msg.bodyAs[String]}")
camelMsgToDeviceDataIn(msg.body) match {
case Some(deviceMsgHash: DeviceMsgHashIn) => bigchainActor ! deviceMsgHash
case None => log.error(s"invalid json message: ${msg.body}")
}
}
override def unhandled(message: Any): Unit = {
log.error(s"received from ${context.sender().path} unknown message: ${message.toString} (${message.getClass})")
}
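  // Parses the raw Camel message body (expected to be a JSON string) into a
  // DeviceMsgHashIn; returns None when the payload cannot be extracted.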
private def camelMsgToDeviceDataIn(body: Any): Option[DeviceMsgHashIn] = {
body match {
case txString: String =>
Json4sUtil.string2JValue(txString) match {
case Some(txJson: JValue) => txJson.extractOpt[DeviceMsgHashIn]
case _ => None
}
case _ =>
log.error(s"received invalid message body: $body")
None
}
}
}
object DeviceDataHashInConsumer {
def props(): Props = Props[DeviceDataHashInConsumer]
}
| ubirch/ubirch-chain-service | core/src/main/scala/com/ubirch/chain/core/actor/consumer/DeviceDataHashInConsumer.scala | Scala | apache-2.0 | 1,813 |
object Test extends App {
Macros.m1
Macros.m2
Macros.m3
}
| AlexSikia/dotty | tests/untried/neg/macro-quasiquotes/Test_2.scala | Scala | bsd-3-clause | 64 |
package com.sksamuel.elastic4s.http.search.aggs
import com.sksamuel.elastic4s.http.search.SearchBodyBuilderFn
import com.sksamuel.elastic4s.searches.SearchDefinition
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval
import org.scalatest.{FunSuite, Matchers}
class MaxBucketAggBuilderTest extends FunSuite with Matchers {
import com.sksamuel.elastic4s.http.ElasticDsl._
test("max bucket agg should match the spec") {
val search = SearchDefinition("myindex" / "mytype").aggs(
dateHistogramAgg("sales_per_month", "date").interval(DateHistogramInterval.MONTH).subagg(
sumAgg("sales", "price")
),
maxBucketAgg("max_monthly_sales", "sales_per_month>sales")
)
SearchBodyBuilderFn(search).string() shouldBe
"""{"aggs":{"sales_per_month":{"date_histogram":{"interval":"1M","field":"date"},"aggs":{"sales":{"sum":{"field":"price"}}}},"max_monthly_sales":{"max_bucket":{"buckets_path":"sales_per_month>sales"}}}}"""
}
}
| aroundus-inc/elastic4s | elastic4s-http/src/test/scala/com/sksamuel/elastic4s/http/search/aggs/MaxBucketAggBuilderTest.scala | Scala | apache-2.0 | 997 |
package proofpeer.proofscript.serialization
import proofpeer.proofscript.frontend._
import ParseTree._
import proofpeer.general._
import proofpeer.indent.Span
import proofpeer.proofscript.logic._
object BasicSourceSerializer extends TransformSerializer[Source, (String, String)](
PairSerializer(StringSerializer, StringSerializer),
(s : Source) => (s.namespace.toString, s.src.toString),
(n : (String, String)) => new Source(Namespace(n._1), n._2))
final class CustomizableSourceSerializer(store : UniquelyIdentifiableStore) extends UniquelyIdentifiableSerializer(
store, BasicSourceSerializer, UISTypeCodes.SOURCE)
object SpanSerializer extends TransformSerializer(VectorSerializer(IntSerializer),
(s : Span) => Vector(s.firstRow, s.lastRow, s.maxRowGap, s.leftMostInFirst, s.leftMost, s.leftMostFirst, s.leftMostRest,
s.rightMostLast, s.firstIndexIncl, s.lastIndexExcl),
(s : Vector[Int]) =>
if (s.size == 10) Span(s(0), s(1), s(2), s(3), s(4), s(5), s(6), s(7), s(8), s(9))
else throw new RuntimeException("cannot deserialize span: " + s))
final class BasicSourcePositionSerializer(SourceSerializer : Serializer[Source])
extends TransformSerializer[SourcePosition, Option[(Source, Option[Span])]](
OptionSerializer(PairSerializer(SourceSerializer, OptionSerializer(SpanSerializer))),
(p : SourcePosition) => if (p == null) None else Some((p.source, p.span)),
(p : Option[(Source, Option[Span])]) => {
p match {
case None => null
case Some((_source, _span)) => new SourcePosition { val source = _source; val span = _span }
}
})
final class CustomizableParseTreeSerializer(
SourcePositionSerializer : Serializer[SourcePosition],
IndexedNameSerializer : Serializer[IndexedName],
NamespaceSerializer : Serializer[Namespace],
NameSerializer : Serializer[Name],
TermSerializer : Serializer[Term],
TypeSerializer : Serializer[Type])
extends Serializer[TracksSourcePosition]
{
val ParseTreeSerializer = new TypecastSerializer[ParseTree, TracksSourcePosition](this)
private val TracksSourcePositionSerializer = this
val PretermSerializer = new CustomizablePretermSerializer(SourcePositionSerializer, IndexedNameSerializer,
NamespaceSerializer, NameSerializer, TermSerializer, TypeSerializer, ParseTreeSerializer)
val PretypeSerializer = PretermSerializer.PretypeSerializer
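  // Adapts the enclosing TracksSourcePosition serializer to a concrete parse-tree
  // type; deserialization delegates to it and casts the result to the requested type.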
private class PTSerializer[Special <: TracksSourcePosition] extends Serializer[Special] {
def serialize(special : Special) = TracksSourcePositionSerializer.serialize(special)
def deserialize(serialized : Any) : Special = {
TracksSourcePositionSerializer.deserialize(serialized).asInstanceOf[Special]
}
}
private object StatementSerializer extends PTSerializer[Statement]
private object IdSerializer extends PTSerializer[Id]
private object BlockSerializer extends PTSerializer[Block]
private object ExprSerializer extends PTSerializer[Expr]
private object DefCaseSerializer extends PTSerializer[DefCase]
private object PatternSerializer extends PTSerializer[Pattern]
private object UnaryOperatorSerializer extends PTSerializer[UnaryOperator]
private object BinaryOperatorSerializer extends PTSerializer[BinaryOperator]
private object CmpOperatorSerializer extends PTSerializer[CmpOperator]
private object ControlFlowSerializer extends PTSerializer[ControlFlow]
private object MatchCaseSerializer extends PTSerializer[MatchCase]
private object CommentSerializer extends PTSerializer[Comment]
private object ValueTypeSerializer extends PTSerializer[ValueType]
private object DatatypeConstrSerializer extends PTSerializer[DatatypeConstr]
private object DatatypeCaseSerializer extends PTSerializer[DatatypeCase]
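  // Maps every parse-tree case class/object to an integer kind code plus its
  // serialized constructor arguments (if any); Kind and Serializers correspond entry by entry.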
private object ParseTreeSerializerBase extends CaseClassSerializerBase[TracksSourcePosition] {
object Kind {
val NILEXPR = 0
val LITERALCONTEXTEXPR = 1
val BOOL = -1
val INTEGER = 2
val STRINGLITERAL = -2
val QUALIFIEDID = 3
val ID = -3
val UNARYOPERATION = 4
val BINARYOPERATION = -4
val CMPOPERATION = 5
val TUPLE = -5
val SETLITERAL = 6
val MAPLITERAL = -6
val APP = 7
val FUN = -7
val TYPECAST = 8
val LAZY = -8
val LOGICTERM = 9
val LOGICTYPE = -9
val CONTROLFLOWEXPR = 10
val DO = -10
val IF = 11
val WHILE = -11
val FOR = 12
val TIMEIT = -12
val MATCHCASE = 13
val MATCH = -13
val CONTEXTCONTROL = 14
val INCONTEXTCONTROL = -14
val INLITERALCONTEXTCONTROL = 15
val NEG = -15
val NOT = 16
val BANG = -16
val RANGETO = 17
val RANGEDOWNTO = -17
val ADD = 18
val SUB = -18
val MUL = 19
val DIV = -19
val MOD = 20
val AND = -20
val OR = 21
val PREPEND = -21
val APPEND = 22
val CONCAT = -22
val MINUS = 23
val EQ = -23
val NEQ = 24
val LE = -24
val LEQ = 25
val GR = -25
val GEQ = 26
val PANY = -26
val PID = 27
val PINT = -27
val PBOOL = 28
val PSTRING = -28
val PLOGICTERM = 29
val PLOGICTYPE = -29
val PTUPLE = 30
val PPREPEND = -30
val PAPPEND = 31
val PIF = -31
val PAS = 32
val PNIL = -32
val PNILBANG = 33
val PTYPE = -33
val PCONSTR = 34
val PDESTRUCT = -34
val TYANY = 35
val TYNIL = -35
val TYCONTEXT = 36
val TYTHEOREM = -36
val TYTERM = 37
val TYTYPE = -37
val TYBOOLEAN = 38
val TYINTEGER = -38
val TYFUNCTION = 39
val TYSTRING = -39
val TYTUPLE = 40
val TYMAP = -40
val TYSET = 41
val TYOPTION = -41
val TYUNION = 42
val TYCUSTOM = -42
val COMMENT = 43
val STCOMMENT = -43
val STEXPR = 44
val STCONTROLFLOW = -44
val STSHOW = 45
val STFAIL = -45
val STASSERT = 46
val STFAILURE = -46
val STVAL = 47
val STVALINTRO = -47
val STASSIGN = 48
val STDEF = -48
val DEFCASE = 49
val DATATYPECONSTR = -49
val DATATYPECASE = 50
val STDATATYPE = -50
val STRETURN = 51
val STASSUME = -51
val STLET = 52
val STCHOOSE = -52
val STTHEOREM = 53
val STTHEOREMBY = -53
val STTHEORY = 54
val BLOCK = -54
val FRESHQUOTE = 55
}
object Serializers {
val BOOL = BooleanSerializer
val INTEGER = BigIntSerializer
val STRINGLITERAL = VectorSerializer(IntSerializer)
val QUALIFIEDID = PairSerializer(NamespaceSerializer,StringSerializer)
val ID = StringSerializer
val UNARYOPERATION = PairSerializer(UnaryOperatorSerializer,ExprSerializer)
val BINARYOPERATION = TripleSerializer(BinaryOperatorSerializer,ExprSerializer,ExprSerializer)
val CMPOPERATION = PairSerializer(VectorSerializer(CmpOperatorSerializer),VectorSerializer(ExprSerializer))
val TUPLE = VectorSerializer(ExprSerializer)
val SETLITERAL = VectorSerializer(ExprSerializer)
val MAPLITERAL = VectorSerializer(PairSerializer(ExprSerializer, ExprSerializer))
val APP = PairSerializer(ExprSerializer,ExprSerializer)
val FUN = PairSerializer(PatternSerializer,BlockSerializer)
val TYPECAST = PairSerializer(ExprSerializer,ValueTypeSerializer)
val LAZY = ExprSerializer
val LOGICTERM = PretermSerializer
val LOGICTYPE = PretypeSerializer
val CONTROLFLOWEXPR = ControlFlowSerializer
val DO = PairSerializer(BlockSerializer,BooleanSerializer)
val IF = TripleSerializer(ExprSerializer,BlockSerializer,BlockSerializer)
val WHILE = PairSerializer(ExprSerializer,BlockSerializer)
val FOR = TripleSerializer(PatternSerializer,ExprSerializer,BlockSerializer)
val TIMEIT = BlockSerializer
val MATCHCASE = PairSerializer(PatternSerializer,BlockSerializer)
val MATCH = PairSerializer(ExprSerializer,VectorSerializer(MatchCaseSerializer))
val CONTEXTCONTROL = PairSerializer(OptionSerializer(ExprSerializer),BlockSerializer)
val INCONTEXTCONTROL = PairSerializer(OptionSerializer(ExprSerializer),BlockSerializer)
val INLITERALCONTEXTCONTROL = PairSerializer(OptionSerializer(ExprSerializer),BlockSerializer)
val PID = StringSerializer
val PINT = BigIntSerializer
val PBOOL = BooleanSerializer
val PSTRING = VectorSerializer(IntSerializer)
val PLOGICTERM = PretermSerializer
val PLOGICTYPE = PretypeSerializer
val PTUPLE = VectorSerializer(PatternSerializer)
val PPREPEND = PairSerializer(PatternSerializer,PatternSerializer)
val PAPPEND = PairSerializer(PatternSerializer,PatternSerializer)
val PIF = PairSerializer(PatternSerializer,ExprSerializer)
val PAS = PairSerializer(PatternSerializer,StringSerializer)
val PTYPE = PairSerializer(PatternSerializer,ValueTypeSerializer)
val PCONSTR = PairSerializer(NameSerializer,OptionSerializer(PatternSerializer))
val PDESTRUCT = PairSerializer(StringSerializer,PatternSerializer)
val TYOPTION = ValueTypeSerializer
val TYUNION = PairSerializer(ValueTypeSerializer,ValueTypeSerializer)
val TYCUSTOM = PairSerializer(OptionSerializer(NamespaceSerializer),StringSerializer)
val COMMENT = StringSerializer
val STCOMMENT = CommentSerializer
val STEXPR = ExprSerializer
val STCONTROLFLOW = ControlFlowSerializer
val STSHOW = ExprSerializer
val STFAIL = OptionSerializer(ExprSerializer)
val STASSERT = ExprSerializer
val STFAILURE = BlockSerializer
val STVAL = PairSerializer(PatternSerializer,BlockSerializer)
val STVALINTRO = ListSerializer(IdSerializer)
val STASSIGN = PairSerializer(PatternSerializer,BlockSerializer)
val STDEF = TripleSerializer(MapSerializer(StringSerializer,VectorSerializer(DefCaseSerializer)),BooleanSerializer,OptionSerializer(ExprSerializer))
val DEFCASE = QuadrupleSerializer(StringSerializer,PatternSerializer,OptionSerializer(ValueTypeSerializer),BlockSerializer)
val DATATYPECONSTR = PairSerializer(StringSerializer,OptionSerializer(PatternSerializer))
val DATATYPECASE = PairSerializer(StringSerializer,VectorSerializer(DatatypeConstrSerializer))
val STDATATYPE = VectorSerializer(DatatypeCaseSerializer)
val STRETURN = OptionSerializer(ExprSerializer)
val STASSUME = PairSerializer(OptionSerializer(StringSerializer),ExprSerializer)
val STLET = PairSerializer(OptionSerializer(StringSerializer),ExprSerializer)
val STCHOOSE = TripleSerializer(OptionSerializer(StringSerializer),ExprSerializer,BlockSerializer)
val STTHEOREM = TripleSerializer(OptionSerializer(StringSerializer),ExprSerializer,BlockSerializer)
val STTHEOREMBY = TripleSerializer(OptionSerializer(StringSerializer),ExprSerializer,ExprSerializer)
val STTHEORY = TripleSerializer(NamespaceSerializer,ListSerializer(PairSerializer(IdSerializer,NamespaceSerializer)),ListSerializer(NamespaceSerializer))
val BLOCK = VectorSerializer(StatementSerializer)
val FRESHQUOTE = PairSerializer(BooleanSerializer,IdSerializer)
}
def decomposeAndSerialize(obj : TracksSourcePosition) : (Int, Option[Any]) = {
obj match {
case NilExpr =>
(Kind.NILEXPR, None)
case LiteralcontextExpr =>
(Kind.LITERALCONTEXTEXPR, None)
case Bool(x) =>
(Kind.BOOL, Some(Serializers.BOOL.serialize(x)))
case Integer(x) =>
(Kind.INTEGER, Some(Serializers.INTEGER.serialize(x)))
case StringLiteral(x) =>
(Kind.STRINGLITERAL, Some(Serializers.STRINGLITERAL.serialize(x)))
case t : QualifiedId =>
(Kind.QUALIFIEDID, Some(Serializers.QUALIFIEDID.serialize(QualifiedId.unapply(t).get)))
case Id(x) =>
(Kind.ID, Some(Serializers.ID.serialize(x)))
case t : UnaryOperation =>
(Kind.UNARYOPERATION, Some(Serializers.UNARYOPERATION.serialize(UnaryOperation.unapply(t).get)))
case t : BinaryOperation =>
(Kind.BINARYOPERATION, Some(Serializers.BINARYOPERATION.serialize(BinaryOperation.unapply(t).get)))
case t : CmpOperation =>
(Kind.CMPOPERATION, Some(Serializers.CMPOPERATION.serialize(CmpOperation.unapply(t).get)))
case Tuple(x) =>
(Kind.TUPLE, Some(Serializers.TUPLE.serialize(x)))
case SetLiteral(x) =>
(Kind.SETLITERAL, Some(Serializers.SETLITERAL.serialize(x)))
case MapLiteral(x) =>
(Kind.MAPLITERAL, Some(Serializers.MAPLITERAL.serialize(x)))
case t : App =>
(Kind.APP, Some(Serializers.APP.serialize(App.unapply(t).get)))
case t : Fun =>
(Kind.FUN, Some(Serializers.FUN.serialize(Fun.unapply(t).get)))
case t : TypeCast =>
(Kind.TYPECAST, Some(Serializers.TYPECAST.serialize(TypeCast.unapply(t).get)))
case Lazy(x) =>
(Kind.LAZY, Some(Serializers.LAZY.serialize(x)))
case LogicTerm(x) =>
(Kind.LOGICTERM, Some(Serializers.LOGICTERM.serialize(x)))
case LogicType(x) =>
(Kind.LOGICTYPE, Some(Serializers.LOGICTYPE.serialize(x)))
case ControlFlowExpr(x) =>
(Kind.CONTROLFLOWEXPR, Some(Serializers.CONTROLFLOWEXPR.serialize(x)))
case t : Do =>
(Kind.DO, Some(Serializers.DO.serialize(Do.unapply(t).get)))
case t : If =>
(Kind.IF, Some(Serializers.IF.serialize(If.unapply(t).get)))
case t : While =>
(Kind.WHILE, Some(Serializers.WHILE.serialize(While.unapply(t).get)))
case t : For =>
(Kind.FOR, Some(Serializers.FOR.serialize(For.unapply(t).get)))
case Timeit(x) =>
(Kind.TIMEIT, Some(Serializers.TIMEIT.serialize(x)))
case t : MatchCase =>
(Kind.MATCHCASE, Some(Serializers.MATCHCASE.serialize(MatchCase.unapply(t).get)))
case t : Match =>
(Kind.MATCH, Some(Serializers.MATCH.serialize(Match.unapply(t).get)))
case t : ContextControl =>
(Kind.CONTEXTCONTROL, Some(Serializers.CONTEXTCONTROL.serialize(ContextControl.unapply(t).get)))
case t : InContextControl =>
(Kind.INCONTEXTCONTROL, Some(Serializers.INCONTEXTCONTROL.serialize(InContextControl.unapply(t).get)))
case t : InLiteralcontextControl =>
(Kind.INLITERALCONTEXTCONTROL, Some(Serializers.INLITERALCONTEXTCONTROL.serialize(InLiteralcontextControl.unapply(t).get)))
case Neg =>
(Kind.NEG, None)
case Not =>
(Kind.NOT, None)
case Bang =>
(Kind.BANG, None)
case RangeTo =>
(Kind.RANGETO, None)
case RangeDownto =>
(Kind.RANGEDOWNTO, None)
case Add =>
(Kind.ADD, None)
case Sub =>
(Kind.SUB, None)
case Mul =>
(Kind.MUL, None)
case Div =>
(Kind.DIV, None)
case Mod =>
(Kind.MOD, None)
case And =>
(Kind.AND, None)
case Or =>
(Kind.OR, None)
case Prepend =>
(Kind.PREPEND, None)
case Append =>
(Kind.APPEND, None)
case Concat =>
(Kind.CONCAT, None)
case Minus =>
(Kind.MINUS, None)
case Eq =>
(Kind.EQ, None)
case NEq =>
(Kind.NEQ, None)
case Le =>
(Kind.LE, None)
case Leq =>
(Kind.LEQ, None)
case Gr =>
(Kind.GR, None)
case Geq =>
(Kind.GEQ, None)
case PAny =>
(Kind.PANY, None)
case PId(x) =>
(Kind.PID, Some(Serializers.PID.serialize(x)))
case PInt(x) =>
(Kind.PINT, Some(Serializers.PINT.serialize(x)))
case PBool(x) =>
(Kind.PBOOL, Some(Serializers.PBOOL.serialize(x)))
case PString(x) =>
(Kind.PSTRING, Some(Serializers.PSTRING.serialize(x)))
case PLogicTerm(x) =>
(Kind.PLOGICTERM, Some(Serializers.PLOGICTERM.serialize(x)))
case PLogicType(x) =>
(Kind.PLOGICTYPE, Some(Serializers.PLOGICTYPE.serialize(x)))
case PTuple(x) =>
(Kind.PTUPLE, Some(Serializers.PTUPLE.serialize(x)))
case t : PPrepend =>
(Kind.PPREPEND, Some(Serializers.PPREPEND.serialize(PPrepend.unapply(t).get)))
case t : PAppend =>
(Kind.PAPPEND, Some(Serializers.PAPPEND.serialize(PAppend.unapply(t).get)))
case t : PIf =>
(Kind.PIF, Some(Serializers.PIF.serialize(PIf.unapply(t).get)))
case t : PAs =>
(Kind.PAS, Some(Serializers.PAS.serialize(PAs.unapply(t).get)))
case PNil =>
(Kind.PNIL, None)
case PNilBang =>
(Kind.PNILBANG, None)
case t : PType =>
(Kind.PTYPE, Some(Serializers.PTYPE.serialize(PType.unapply(t).get)))
case t : PConstr =>
(Kind.PCONSTR, Some(Serializers.PCONSTR.serialize(PConstr.unapply(t).get)))
case t : PDestruct =>
(Kind.PDESTRUCT, Some(Serializers.PDESTRUCT.serialize(PDestruct.unapply(t).get)))
case TyAny =>
(Kind.TYANY, None)
case TyNil =>
(Kind.TYNIL, None)
case TyContext =>
(Kind.TYCONTEXT, None)
case TyTheorem =>
(Kind.TYTHEOREM, None)
case TyTerm =>
(Kind.TYTERM, None)
case TyType =>
(Kind.TYTYPE, None)
case TyBoolean =>
(Kind.TYBOOLEAN, None)
case TyInteger =>
(Kind.TYINTEGER, None)
case TyFunction =>
(Kind.TYFUNCTION, None)
case TyString =>
(Kind.TYSTRING, None)
case TyTuple =>
(Kind.TYTUPLE, None)
case TyMap =>
(Kind.TYMAP, None)
case TySet =>
(Kind.TYSET, None)
case TyOption(x) =>
(Kind.TYOPTION, Some(Serializers.TYOPTION.serialize(x)))
case t : TyUnion =>
(Kind.TYUNION, Some(Serializers.TYUNION.serialize(TyUnion.unapply(t).get)))
case t : TyCustom =>
(Kind.TYCUSTOM, Some(Serializers.TYCUSTOM.serialize(TyCustom.unapply(t).get)))
case Comment(x) =>
(Kind.COMMENT, Some(Serializers.COMMENT.serialize(x)))
case STComment(x) =>
(Kind.STCOMMENT, Some(Serializers.STCOMMENT.serialize(x)))
case STExpr(x) =>
(Kind.STEXPR, Some(Serializers.STEXPR.serialize(x)))
case STControlFlow(x) =>
(Kind.STCONTROLFLOW, Some(Serializers.STCONTROLFLOW.serialize(x)))
case STShow(x) =>
(Kind.STSHOW, Some(Serializers.STSHOW.serialize(x)))
case STFail(x) =>
(Kind.STFAIL, Some(Serializers.STFAIL.serialize(x)))
case STAssert(x) =>
(Kind.STASSERT, Some(Serializers.STASSERT.serialize(x)))
case STFailure(x) =>
(Kind.STFAILURE, Some(Serializers.STFAILURE.serialize(x)))
case t : STVal =>
(Kind.STVAL, Some(Serializers.STVAL.serialize(STVal.unapply(t).get)))
case STValIntro(x) =>
(Kind.STVALINTRO, Some(Serializers.STVALINTRO.serialize(x)))
case t : STAssign =>
(Kind.STASSIGN, Some(Serializers.STASSIGN.serialize(STAssign.unapply(t).get)))
case t : STDef =>
(Kind.STDEF, Some(Serializers.STDEF.serialize(STDef.unapply(t).get)))
case t : DefCase =>
(Kind.DEFCASE, Some(Serializers.DEFCASE.serialize(DefCase.unapply(t).get)))
case t : DatatypeConstr =>
(Kind.DATATYPECONSTR, Some(Serializers.DATATYPECONSTR.serialize(DatatypeConstr.unapply(t).get)))
case t : DatatypeCase =>
(Kind.DATATYPECASE, Some(Serializers.DATATYPECASE.serialize(DatatypeCase.unapply(t).get)))
case STDatatype(x) =>
(Kind.STDATATYPE, Some(Serializers.STDATATYPE.serialize(x)))
case STReturn(x) =>
(Kind.STRETURN, Some(Serializers.STRETURN.serialize(x)))
case t : STAssume =>
(Kind.STASSUME, Some(Serializers.STASSUME.serialize(STAssume.unapply(t).get)))
case t : STLet =>
(Kind.STLET, Some(Serializers.STLET.serialize(STLet.unapply(t).get)))
case t : STChoose =>
(Kind.STCHOOSE, Some(Serializers.STCHOOSE.serialize(STChoose.unapply(t).get)))
case t : STTheorem =>
(Kind.STTHEOREM, Some(Serializers.STTHEOREM.serialize(STTheorem.unapply(t).get)))
case t : STTheoremBy =>
(Kind.STTHEOREMBY, Some(Serializers.STTHEOREMBY.serialize(STTheoremBy.unapply(t).get)))
case t : STTheory =>
(Kind.STTHEORY, Some(Serializers.STTHEORY.serialize(STTheory.unapply(t).get)))
case Block(x) =>
(Kind.BLOCK, Some(Serializers.BLOCK.serialize(x)))
case t : FreshQuote =>
(Kind.FRESHQUOTE, Some(Serializers.FRESHQUOTE.serialize(FreshQuote.unapply(t).get)))
case _ => throw new RuntimeException("ParseTreeSerializerBase: cannot serialize " + obj)
}
}
def deserializeAndCompose(kind : Int, args : Option[Any]) : TracksSourcePosition = {
kind match {
case Kind.NILEXPR if args.isEmpty =>
NilExpr
case Kind.LITERALCONTEXTEXPR if args.isEmpty =>
LiteralcontextExpr
case Kind.BOOL if args.isDefined =>
Bool(Serializers.BOOL.deserialize(args.get))
case Kind.INTEGER if args.isDefined =>
Integer(Serializers.INTEGER.deserialize(args.get))
case Kind.STRINGLITERAL if args.isDefined =>
StringLiteral(Serializers.STRINGLITERAL.deserialize(args.get))
case Kind.QUALIFIEDID if args.isDefined =>
QualifiedId.tupled(Serializers.QUALIFIEDID.deserialize(args.get))
case Kind.ID if args.isDefined =>
Id(Serializers.ID.deserialize(args.get))
case Kind.UNARYOPERATION if args.isDefined =>
UnaryOperation.tupled(Serializers.UNARYOPERATION.deserialize(args.get))
case Kind.BINARYOPERATION if args.isDefined =>
BinaryOperation.tupled(Serializers.BINARYOPERATION.deserialize(args.get))
case Kind.CMPOPERATION if args.isDefined =>
CmpOperation.tupled(Serializers.CMPOPERATION.deserialize(args.get))
case Kind.TUPLE if args.isDefined =>
Tuple(Serializers.TUPLE.deserialize(args.get))
case Kind.SETLITERAL if args.isDefined =>
SetLiteral(Serializers.SETLITERAL.deserialize(args.get))
case Kind.MAPLITERAL if args.isDefined =>
MapLiteral(Serializers.MAPLITERAL.deserialize(args.get))
case Kind.APP if args.isDefined =>
App.tupled(Serializers.APP.deserialize(args.get))
case Kind.FUN if args.isDefined =>
Fun.tupled(Serializers.FUN.deserialize(args.get))
case Kind.TYPECAST if args.isDefined =>
TypeCast.tupled(Serializers.TYPECAST.deserialize(args.get))
case Kind.LAZY if args.isDefined =>
Lazy(Serializers.LAZY.deserialize(args.get))
case Kind.LOGICTERM if args.isDefined =>
LogicTerm(Serializers.LOGICTERM.deserialize(args.get))
case Kind.LOGICTYPE if args.isDefined =>
LogicType(Serializers.LOGICTYPE.deserialize(args.get))
case Kind.CONTROLFLOWEXPR if args.isDefined =>
ControlFlowExpr(Serializers.CONTROLFLOWEXPR.deserialize(args.get))
case Kind.DO if args.isDefined =>
Do.tupled(Serializers.DO.deserialize(args.get))
case Kind.IF if args.isDefined =>
If.tupled(Serializers.IF.deserialize(args.get))
case Kind.WHILE if args.isDefined =>
While.tupled(Serializers.WHILE.deserialize(args.get))
case Kind.FOR if args.isDefined =>
For.tupled(Serializers.FOR.deserialize(args.get))
case Kind.TIMEIT if args.isDefined =>
Timeit(Serializers.TIMEIT.deserialize(args.get))
case Kind.MATCHCASE if args.isDefined =>
MatchCase.tupled(Serializers.MATCHCASE.deserialize(args.get))
case Kind.MATCH if args.isDefined =>
Match.tupled(Serializers.MATCH.deserialize(args.get))
case Kind.CONTEXTCONTROL if args.isDefined =>
ContextControl.tupled(Serializers.CONTEXTCONTROL.deserialize(args.get))
case Kind.INCONTEXTCONTROL if args.isDefined =>
InContextControl.tupled(Serializers.INCONTEXTCONTROL.deserialize(args.get))
case Kind.INLITERALCONTEXTCONTROL if args.isDefined =>
InLiteralcontextControl.tupled(Serializers.INLITERALCONTEXTCONTROL.deserialize(args.get))
case Kind.NEG if args.isEmpty =>
Neg
case Kind.NOT if args.isEmpty =>
Not
case Kind.BANG if args.isEmpty =>
Bang
case Kind.RANGETO if args.isEmpty =>
RangeTo
case Kind.RANGEDOWNTO if args.isEmpty =>
RangeDownto
case Kind.ADD if args.isEmpty =>
Add
case Kind.SUB if args.isEmpty =>
Sub
case Kind.MUL if args.isEmpty =>
Mul
case Kind.DIV if args.isEmpty =>
Div
case Kind.MOD if args.isEmpty =>
Mod
case Kind.AND if args.isEmpty =>
And
case Kind.OR if args.isEmpty =>
Or
case Kind.PREPEND if args.isEmpty =>
Prepend
case Kind.APPEND if args.isEmpty =>
Append
case Kind.CONCAT if args.isEmpty =>
Concat
case Kind.MINUS if args.isEmpty =>
Minus
case Kind.EQ if args.isEmpty =>
Eq
case Kind.NEQ if args.isEmpty =>
NEq
case Kind.LE if args.isEmpty =>
Le
case Kind.LEQ if args.isEmpty =>
Leq
case Kind.GR if args.isEmpty =>
Gr
case Kind.GEQ if args.isEmpty =>
Geq
case Kind.PANY if args.isEmpty =>
PAny
case Kind.PID if args.isDefined =>
PId(Serializers.PID.deserialize(args.get))
case Kind.PINT if args.isDefined =>
PInt(Serializers.PINT.deserialize(args.get))
case Kind.PBOOL if args.isDefined =>
PBool(Serializers.PBOOL.deserialize(args.get))
case Kind.PSTRING if args.isDefined =>
PString(Serializers.PSTRING.deserialize(args.get))
case Kind.PLOGICTERM if args.isDefined =>
PLogicTerm(Serializers.PLOGICTERM.deserialize(args.get))
case Kind.PLOGICTYPE if args.isDefined =>
PLogicType(Serializers.PLOGICTYPE.deserialize(args.get))
case Kind.PTUPLE if args.isDefined =>
PTuple(Serializers.PTUPLE.deserialize(args.get))
case Kind.PPREPEND if args.isDefined =>
PPrepend.tupled(Serializers.PPREPEND.deserialize(args.get))
case Kind.PAPPEND if args.isDefined =>
PAppend.tupled(Serializers.PAPPEND.deserialize(args.get))
case Kind.PIF if args.isDefined =>
PIf.tupled(Serializers.PIF.deserialize(args.get))
case Kind.PAS if args.isDefined =>
PAs.tupled(Serializers.PAS.deserialize(args.get))
case Kind.PNIL if args.isEmpty =>
PNil
case Kind.PNILBANG if args.isEmpty =>
PNilBang
case Kind.PTYPE if args.isDefined =>
PType.tupled(Serializers.PTYPE.deserialize(args.get))
case Kind.PCONSTR if args.isDefined =>
PConstr.tupled(Serializers.PCONSTR.deserialize(args.get))
case Kind.PDESTRUCT if args.isDefined =>
PDestruct.tupled(Serializers.PDESTRUCT.deserialize(args.get))
case Kind.TYANY if args.isEmpty =>
TyAny
case Kind.TYNIL if args.isEmpty =>
TyNil
case Kind.TYCONTEXT if args.isEmpty =>
TyContext
case Kind.TYTHEOREM if args.isEmpty =>
TyTheorem
case Kind.TYTERM if args.isEmpty =>
TyTerm
case Kind.TYTYPE if args.isEmpty =>
TyType
case Kind.TYBOOLEAN if args.isEmpty =>
TyBoolean
case Kind.TYINTEGER if args.isEmpty =>
TyInteger
case Kind.TYFUNCTION if args.isEmpty =>
TyFunction
case Kind.TYSTRING if args.isEmpty =>
TyString
case Kind.TYTUPLE if args.isEmpty =>
TyTuple
case Kind.TYMAP if args.isEmpty =>
TyMap
case Kind.TYSET if args.isEmpty =>
TySet
case Kind.TYOPTION if args.isDefined =>
TyOption(Serializers.TYOPTION.deserialize(args.get))
case Kind.TYUNION if args.isDefined =>
TyUnion.tupled(Serializers.TYUNION.deserialize(args.get))
case Kind.TYCUSTOM if args.isDefined =>
TyCustom.tupled(Serializers.TYCUSTOM.deserialize(args.get))
case Kind.COMMENT if args.isDefined =>
Comment(Serializers.COMMENT.deserialize(args.get))
case Kind.STCOMMENT if args.isDefined =>
STComment(Serializers.STCOMMENT.deserialize(args.get))
case Kind.STEXPR if args.isDefined =>
STExpr(Serializers.STEXPR.deserialize(args.get))
case Kind.STCONTROLFLOW if args.isDefined =>
STControlFlow(Serializers.STCONTROLFLOW.deserialize(args.get))
case Kind.STSHOW if args.isDefined =>
STShow(Serializers.STSHOW.deserialize(args.get))
case Kind.STFAIL if args.isDefined =>
STFail(Serializers.STFAIL.deserialize(args.get))
case Kind.STASSERT if args.isDefined =>
STAssert(Serializers.STASSERT.deserialize(args.get))
case Kind.STFAILURE if args.isDefined =>
STFailure(Serializers.STFAILURE.deserialize(args.get))
case Kind.STVAL if args.isDefined =>
STVal.tupled(Serializers.STVAL.deserialize(args.get))
case Kind.STVALINTRO if args.isDefined =>
STValIntro(Serializers.STVALINTRO.deserialize(args.get))
case Kind.STASSIGN if args.isDefined =>
STAssign.tupled(Serializers.STASSIGN.deserialize(args.get))
case Kind.STDEF if args.isDefined =>
STDef.tupled(Serializers.STDEF.deserialize(args.get))
case Kind.DEFCASE if args.isDefined =>
DefCase.tupled(Serializers.DEFCASE.deserialize(args.get))
case Kind.DATATYPECONSTR if args.isDefined =>
DatatypeConstr.tupled(Serializers.DATATYPECONSTR.deserialize(args.get))
case Kind.DATATYPECASE if args.isDefined =>
DatatypeCase.tupled(Serializers.DATATYPECASE.deserialize(args.get))
case Kind.STDATATYPE if args.isDefined =>
STDatatype(Serializers.STDATATYPE.deserialize(args.get))
case Kind.STRETURN if args.isDefined =>
STReturn(Serializers.STRETURN.deserialize(args.get))
case Kind.STASSUME if args.isDefined =>
STAssume.tupled(Serializers.STASSUME.deserialize(args.get))
case Kind.STLET if args.isDefined =>
STLet.tupled(Serializers.STLET.deserialize(args.get))
case Kind.STCHOOSE if args.isDefined =>
STChoose.tupled(Serializers.STCHOOSE.deserialize(args.get))
case Kind.STTHEOREM if args.isDefined =>
STTheorem.tupled(Serializers.STTHEOREM.deserialize(args.get))
case Kind.STTHEOREMBY if args.isDefined =>
STTheoremBy.tupled(Serializers.STTHEOREMBY.deserialize(args.get))
case Kind.STTHEORY if args.isDefined =>
STTheory.tupled(Serializers.STTHEORY.deserialize(args.get))
case Kind.BLOCK if args.isDefined =>
Block(Serializers.BLOCK.deserialize(args.get))
case Kind.FRESHQUOTE if args.isDefined =>
FreshQuote.tupled(Serializers.FRESHQUOTE.deserialize(args.get))
case _ => throw new RuntimeException("ParseTreeSerializerBase: cannot deserialize " + (kind, args))
}
}
}
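  // The underlying store may deserialize integral values as Long, so accept both.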
private def decodeInt(b : Any) : Int = {
b match {
case i : Int => i
case l : Long => l.toInt
case _ => throw new RuntimeException("ParseTreeSerializer.decodeInt " + b + " failed")
}
}
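  // Serialized form: Vector(kind, sourcePosition) for argument-less nodes,
  // Vector(kind, sourcePosition, args) otherwise; deserialize reverses this.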
def serialize(parsetree : TracksSourcePosition) = {
val (kind, args) = ParseTreeSerializerBase.decomposeAndSerialize(parsetree)
val serializedSourcePosition = SourcePositionSerializer.serialize(parsetree.sourcePosition)
args match {
case None => Vector(kind, serializedSourcePosition)
case Some(args) => Vector(kind, serializedSourcePosition, args)
}
}
def deserialize(serialized : Any) : TracksSourcePosition = {
serialized match {
case Vector(_kind, serializedSourcePosition) =>
val kind = decodeInt(_kind)
val sourcePosition = SourcePositionSerializer.deserialize(serializedSourcePosition)
val tree = ParseTreeSerializerBase.deserializeAndCompose(kind.toInt, None)
tree.sourcePosition = sourcePosition
tree
case Vector(_kind, serializedSourcePosition, args) =>
val kind = decodeInt(_kind)
val sourcePosition = SourcePositionSerializer.deserialize(serializedSourcePosition)
val tree = ParseTreeSerializerBase.deserializeAndCompose(kind.toInt, Some(args))
tree.sourcePosition = sourcePosition
tree
case _ => throw new RuntimeException("cannot deserialize parse tree: " + serialized)
}
}
}
/** This code was used to generate most of the code above. It is not needed at runtime, only during development. */
object ParseTreeSerializerGenerator {
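  // Each entry is either a parameterless case-object name or a tuple whose first
  // element is the case-class name, followed by one serializer expression per field.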
val names : Vector[Any] = Vector(
"NilExpr",
"LiteralcontextExpr",
("Bool", "BooleanSerializer"),
("Integer", "BigIntSerializer"),
("StringLiteral", "VectorSerializer(IntSerializer)"),
("QualifiedId", "NamespaceSerializer", "StringSerializer"),
("Id", "StringSerializer"),
("UnaryOperation", "UnaryOperatorSerializer", "ExprSerializer"),
("BinaryOperation", "BinaryOperatorSerializer", "ExprSerializer", "ExprSerializer"),
("CmpOperation", "VectorSerializer(CmpOperatorSerializer)", "VectorSerializer(ExprSerializer)"),
("Tuple", "VectorSerializer(ExprSerializer)"),
("SetLiteral", "VectorSerializer(ExprSerializer)"),
("MapLiteral", "VectorSerializer(PairSerializer(ExprSerializer, ExprSerializer))"),
("App", "ExprSerializer", "ExprSerializer"),
("Fun", "PatternSerializer", "BlockSerializer"),
("TypeCast", "ExprSerializer", "ValueTypeSerializer"),
("Lazy", "ExprSerializer"),
("LogicTerm", "PretermSerializer"),
("LogicType", "PretypeSerializer"),
("ControlFlowExpr", "ControlFlowSerializer"),
("Do", "BlockSerializer", "BooleanSerializer"),
("If", "ExprSerializer", "BlockSerializer", "BlockSerializer"),
("While", "ExprSerializer", "BlockSerializer"),
("For", "PatternSerializer", "ExprSerializer", "BlockSerializer"),
("Timeit", "BlockSerializer"),
("MatchCase", "PatternSerializer", "BlockSerializer"),
("Match", "ExprSerializer", "VectorSerializer(MatchCaseSerializer)"),
("ContextControl", "OptionSerializer(ExprSerializer)", "BlockSerializer"),
("InContextControl", "OptionSerializer(ExprSerializer)", "BlockSerializer"),
("InLiteralcontextControl", "OptionSerializer(ExprSerializer)", "BlockSerializer"),
"Neg",
"Not",
"Bang",
"RangeTo",
"RangeDownto",
"Add",
"Sub",
"Mul",
"Div",
"Mod",
"And",
"Or",
"Prepend",
"Append",
"Concat",
"Minus",
"Eq",
"NEq",
"Le",
"Leq",
"Gr",
"Geq",
"PAny",
("PId", "StringSerializer"),
("PInt", "BigIntSerializer"),
("PBool", "BooleanSerializer"),
("PString", "VectorSerializer(IntSerializer)"),
("PLogicTerm", "PretermSerializer"),
("PLogicType", "PretypeSerializer"),
("PTuple", "VectorSerializer(PatternSerializer)"),
("PPrepend", "PatternSerializer", "PatternSerializer"),
("PAppend", "PatternSerializer", "PatternSerializer"),
("PIf", "PatternSerializer", "ExprSerializer"),
("PAs", "PatternSerializer", "StringSerializer"),
"PNil",
"PNilBang",
("PType", "PatternSerializer", "ValueTypeSerializer"),
("PConstr", "NameSerializer", "OptionSerializer(PatternSerializer)"),
("PDestruct", "StringSerializer", "PatternSerializer"),
"TyAny",
"TyNil",
"TyContext",
"TyTheorem",
"TyTerm",
"TyType",
"TyBoolean",
"TyInteger",
"TyFunction",
"TyString",
"TyTuple",
"TyMap",
"TySet",
("TyOption", "ValueTypeSerializer"),
("TyUnion", "ValueTypeSerializer", "ValueTypeSerializer"),
("TyCustom", "OptionSerializer(NamespaceSerializer)", "StringSerializer"),
("Comment", "StringSerializer"),
("STComment", "CommentSerializer"),
("STExpr", "ExprSerializer"),
("STControlFlow", "ControlFlowSerializer"),
("STShow", "ExprSerializer"),
("STFail", "OptionSerializer(ExprSerializer)"),
("STAssert", "ExprSerializer"),
("STFailure", "BlockSerializer"),
("STVal", "PatternSerializer", "BlockSerializer"),
("STValIntro", "ListSerializer(IdSerializer)"),
("STAssign", "PatternSerializer", "BlockSerializer"),
("STDef", "MapSerializer(StringSerializer,VectorSerializer(DefCaseSerializer))", "BooleanSerializer", "OptionSerializer(ExprSerializer)"),
("DefCase", "StringSerializer", "PatternSerializer", "OptionSerializer(ValueTypeSerializer)", "BlockSerializer"),
("DatatypeConstr", "StringSerializer", "OptionSerializer(PatternSerializer)"),
("DatatypeCase", "StringSerializer", "VectorSerializer(DatatypeConstrSerializer)"),
("STDatatype", "VectorSerializer(DatatypeCaseSerializer)"),
("STReturn", "OptionSerializer(ExprSerializer)"),
("STAssume", "OptionSerializer(StringSerializer)", "ExprSerializer"),
("STLet", "OptionSerializer(StringSerializer)", "ExprSerializer"),
("STChoose", "OptionSerializer(StringSerializer)", "ExprSerializer", "BlockSerializer"),
("STTheorem", "OptionSerializer(StringSerializer)", "ExprSerializer", "BlockSerializer"),
("STTheoremBy", "OptionSerializer(StringSerializer)", "ExprSerializer", "ExprSerializer"),
("STTheory", "NamespaceSerializer", "ListSerializer(PairSerializer(IdSerializer,NamespaceSerializer))", "ListSerializer(NamespaceSerializer)"),
("Block", "VectorSerializer(StatementSerializer)"),
("FreshQuote", "BooleanSerializer", "IdSerializer")
)
/** Rename _main to main to generate the code. */
def _main(args : Array[String]) {
val tool = new CaseClassSerializerTool("ParseTreeSerializerBase", names, "TracksSourcePosition")
print("private ")
tool.output()
}
}
| proofpeer/proofpeer-proofscript | shared/src/main/scala/proofpeer/proofscript/serialization/ParseTreeSerializer.scala | Scala | mit | 38,527 |
package scala.meta
package internal
package ast
import org.scalameta.data._
import org.scalameta.unreachable
@data class MergeException(culprits: Seq[Tree], message: String, cause: Option[Throwable] = None)
extends Exception(message, cause.orNull) with ScalametaException {
override def toString = super.toString
}
| beni55/scalameta | scalameta/trees/src/main/scala/scala/meta/internal/ast/Exceptions.scala | Scala | bsd-3-clause | 319 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
/**
* A set of APIs for adding data sources to Spark SQL.
*/
package object sources
| bravo-zhang/spark | sql/core/src/main/scala/org/apache/spark/sql/sources/package.scala | Scala | apache-2.0 | 917 |
package com.karasiq.nanoboard.streaming
import boopickle.CompositePickler
import boopickle.Default._
import com.karasiq.nanoboard.api.NanoboardMessageData
sealed trait NanoboardEvent
object NanoboardEvent {
case class PostAdded(post: NanoboardMessageData) extends NanoboardEvent
case class PostDeleted(hash: String) extends NanoboardEvent
case class PostVerified(post: NanoboardMessageData) extends NanoboardEvent
implicit val eventPickler: CompositePickler[NanoboardEvent] = compositePickler[NanoboardEvent]
eventPickler
.addConcreteType[PostAdded]
.addConcreteType[PostDeleted]
.addConcreteType[PostVerified]
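  // Minimal round-trip sketch (assumes the standard boopickle Pickle/Unpickle API;
  // the hash value below is illustrative only):
  //   import java.nio.ByteBuffer
  //   val bytes: ByteBuffer = Pickle.intoBytes(PostDeleted("somehash"): NanoboardEvent)
  //   val event: NanoboardEvent = Unpickle[NanoboardEvent].fromBytes(bytes)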
} | Karasiq/nanoboard | shared/shared/src/main/scala/com/karasiq/nanoboard/streaming/NanoboardEvent.scala | Scala | apache-2.0 | 639 |
package lettergenerator
package renderer
import scala.swing.{MainFrame, Label, CheckBox, TextField}
import scala.swing.{Dialog, ComboBox, FileChooser}
import scala.swing.event.ValueChanged
import java.io.File
import javax.swing.filechooser.FileNameExtensionFilter
/**
 * The main frame, responsible for laying out the UI elements.
* @param medium an Interactor object
*/
class Wizard(medium: Interactor) extends MainFrame {
private val TextWidth: Int = 56
// to make the buttons, labels and textfields
private[renderer] val elementMaker = new ElementMaker()
// to arrange the interface's elements
private val elementOrganiser = new ElementOrganiser(this)
// for opening files and directories
private val csvOpener, docxOpener, dirOpener = elementMaker.makeFileChooser()
// source of letter header details
private[renderer] val (detailsLabel, detailsText, detailsButton) =
elementMaker.makeOpenFileElements("Please choose the details file with the"
+ " column names to create the letters", csvOpener, TextWidth)
// source of letter template
private[renderer] val (templateLabel, templateText, templateButton) =
elementMaker.makeOpenFileElements("Please choose the file with the "
+ " letter template", docxOpener, TextWidth)
// destination folder
private[renderer] val (destinationLabel, destinationText, destinationButton) =
elementMaker.makeOpenFileElements("Please choose a destination "
+ "folder for the letters", dirOpener, TextWidth)
// drop down box for file name column
private var textChangeFlag: String = detailsText.text
private[renderer] val fileNameLabel = elementMaker.makeLabel(" ")
private[renderer] val fileNameColumn = elementMaker.makeComboBox()
// check box to allow blank values (empty cells) in details file
private[renderer] val allowEmptyCels_ : CheckBox =
elementMaker.makeCheckBox("Allow empty cells")
// check box to check if file name is also present in template
// as a variable to be replaced
private[renderer] val fnAlsoInTemplate_ : CheckBox =
elementMaker.makeCheckBox("File name also part of letter")
private[renderer] val msg: Label = elementMaker.makeLabel("Ready")
def setLayout(title: String): Unit = {
this.title = title
fnAlsoInTemplate_.selected = false
allowEmptyCels_.selected = true
listenTo(detailsText)
reactions += { case ValueChanged(detailsText) => comboBoxRoutine() }
setPreferredFileExtensions()
elementOrganiser.organise()
}
def message(text: String): Unit = msg.text = text
def alert(text: String): Unit = Dialog.showMessage(this,text,"Alert")
def allowEmptyCells: Boolean = allowEmptyCels_.selected
def fNameColumn: String = fileNameColumn.selection.item
def fnAlsoInTemplate: Boolean = fnAlsoInTemplate_.selected
def submit(): Unit = medium.submit()
def detailsFile: String = detailsText.text
def templateFile: String = templateText.text
def destinationFolder: String = destinationText.text
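  // When the details file path changes, repopulates the file-name combo box with the
  // headers of the newly selected file and updates the prompt label.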
private def comboBoxRoutine(): Unit = {
if (detailsFile != textChangeFlag) {
fileNameColumn.peer.setModel(
ComboBox.newConstantModel(
medium.detailsFileHeaders()))
textChangeFlag = detailsFile
fileNameColumn.selection.item = ""
if (fileNameColumn.peer.getModel.getSize > 1)
fileNameLabel.text = "Please select the column which contains "+
"the file names for the new documents"
else fileNameLabel.text = ""
}
}
def setPreferredFileExtensions(): Unit = {
csvOpener.fileFilter = (new FileNameExtensionFilter("CSV (Comma Separated Values)","csv"))
docxOpener.fileFilter = (new FileNameExtensionFilter("Word Document","docx"))
dirOpener.fileSelectionMode = FileChooser.SelectionMode.DirectoriesOnly
}
} | claudiusbr/LetterGenerator | src/main/scala/lettergenerator/renderer/Wizard.scala | Scala | mit | 3,873 |
import com.typesafe.sbt.SbtScalariform._
import scalariform.formatter.preferences._
object ScalariformSettings {
lazy val ourScalariformSettings = scalariformSettings ++ Seq(
ScalariformKeys.preferences := FormattingPreferences()
.setPreference(AlignParameters, true)
.setPreference(AlignSingleLineCaseStatements, true)
.setPreference(DoubleIndentClassDeclaration, true)
)
}
| equites-chess/equites-core | project/ScalariformSettings.scala | Scala | gpl-3.0 | 402 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.util
import scala.collection.JavaConverters._
import org.apache.spark.sql.common.util.Spark2QueryTest
import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.sql.test.Spark2TestQueryExecutor
import org.apache.spark.sql.{CarbonEnv, SparkSession}
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.cache.{Cache, CacheProvider, CacheType}
import org.apache.carbondata.core.cache.dictionary.{Dictionary, DictionaryColumnUniqueIdentifier}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
/**
* Test Case for Dictionary LRU Cache.
*/
class DictionaryLRUCacheTestCase extends Spark2QueryTest with BeforeAndAfterAll {
var spark : SparkSession = null
var path : String = null
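  // For every dimension of the table, loads its dictionary through the reverse and
  // forward caches and asserts that the access count is exactly 1 before clearing it.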
def checkDictionaryAccessCount(databaseName: String, tableName: String): Unit = {
val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
.lookupRelation(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
.asInstanceOf[CarbonRelation].carbonTable
val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier
val dimensions = carbonTable.getAllDimensions.asScala.toList
dimensions.foreach { dim =>
val columnIdentifier = dim.getColumnIdentifier
// Check the dictionary cache access.
val identifier: DictionaryColumnUniqueIdentifier = new DictionaryColumnUniqueIdentifier(
absoluteTableIdentifier,
columnIdentifier,
columnIdentifier.getDataType)
val isDictExists: Boolean = CarbonUtil.isFileExistsForGivenColumn(identifier)
var dictionary: Dictionary = null
if (isDictExists) {
val dictCacheReverse: Cache[DictionaryColumnUniqueIdentifier, Dictionary]
= CacheProvider.getInstance().createCache(CacheType.REVERSE_DICTIONARY)
dictionary = dictCacheReverse.get(identifier)
assert(dictionary.getAccessCount == 1)
CarbonUtil.clearDictionaryCache(dictionary)
val dictCacheForward: Cache[DictionaryColumnUniqueIdentifier, Dictionary]
= CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY)
dictionary = dictCacheForward.get(identifier)
assert(dictionary.getAccessCount == 1)
CarbonUtil.clearDictionaryCache(dictionary)
}
}
}
override def beforeAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1")
.addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1")
path = s"$resourcesPath/restructure/data_2000.csv"
sql("drop table if exists carbon_new1")
sql("drop table if exists carbon_new2")
sql("drop table if exists carbon_new3")
sql("drop table if exists carbon_new4")
sql("drop table if exists carbon_new5")
sql("drop table if exists carbon_new6")
sql("drop table if exists carbon_new7")
sql("drop table if exists carbon_new8")
sql("drop table if exists carbon_new9")
sql("drop table if exists carbon_new10")
}
test("test for dictionary LRU Cache for Load Single Pass") {
sql(
"CREATE TABLE carbon_new1 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new1 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new1 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
"CREATE TABLE carbon_new2 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new2 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new2 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
checkDictionaryAccessCount("default", "carbon_new2")
}
test("test for dictionary LRU Cache for Load Non Single Pass") {
sql(
"CREATE TABLE carbon_new3 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new3 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new3 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
"CREATE TABLE carbon_new4 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new4 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new4 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
checkDictionaryAccessCount("default", "carbon_new4")
}
test("test for dictionary LRU Cache for Select On Table") {
sql(
"CREATE TABLE carbon_new5 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new5 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new5 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql("select * from carbon_new5")
checkDictionaryAccessCount("default", "carbon_new5")
sql(
"CREATE TABLE carbon_new6 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new6 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new6 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql("select * from carbon_new6")
checkDictionaryAccessCount("default", "carbon_new6")
}
test("test for dictionary LRU Cache for Select With Filter On Table") {
sql(
"CREATE TABLE carbon_new7 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new7 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new7 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql("select * from carbon_new7 where CUST_ID > 10")
checkDictionaryAccessCount("default", "carbon_new7")
sql(
"CREATE TABLE carbon_new8 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new8 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new8 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql("select * from carbon_new8 where CUST_ID > 100")
checkDictionaryAccessCount("default", "carbon_new8")
}
test("test for dictionary LRU Cache for Insert Into") {
sql(
"CREATE TABLE carbon_new9 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, " +
"Double_COLUMN2')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new9 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new9 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
sql("select * from carbon_new9 where CUST_ID > 10")
checkDictionaryAccessCount("default", "carbon_new9")
sql(
"CREATE TABLE carbon_new10 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB " +
"TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 " +
"decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 " +
"double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES " +
"('dictionary_include'='CUST_NAME')")
sql("insert into carbon_new10 select * from carbon_new9")
checkDictionaryAccessCount("default", "carbon_new10")
sql(
s"LOAD DATA INPATH '$path' INTO TABLE carbon_new10 OPTIONS" +
"('DELIMITER'=',' , 'QUOTECHAR'='\\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, " +
"INTEGER_COLUMN1')")
checkDictionaryAccessCount("default", "carbon_new10")
}
override def afterAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE,
CarbonCommonConstants.CARBON_MAX_LRU_CACHE_SIZE_DEFAULT)
.addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE,
CarbonCommonConstants.CARBON_MAX_LRU_CACHE_SIZE_DEFAULT)
sql("drop table if exists carbon_new1")
sql("drop table if exists carbon_new2")
sql("drop table if exists carbon_new3")
sql("drop table if exists carbon_new4")
sql("drop table if exists carbon_new5")
sql("drop table if exists carbon_new6")
sql("drop table if exists carbon_new7")
sql("drop table if exists carbon_new8")
sql("drop table if exists carbon_new9")
sql("drop table if exists carbon_new10")
}
}
| manishgupta88/carbondata | integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala | Scala | apache-2.0 | 18,483 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala
package reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
@deprecated("use scala.reflect.ClassTag instead", "2.10.0")
trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
self: ClassManifest[T] =>
// Still in use in target test.junit.comp.
@deprecated("use runtimeClass instead", "2.10.0")
def erasure: jClass[_] = runtimeClass
private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
left.nonEmpty && {
val next = left.head
val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass)
supers(sup) || {
val xs = left ++ supers filterNot seen
loop(xs - next, seen + next)
}
}
}
loop(Set(sub), Set())
}
private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) {
// !!! [Martin] this is wrong, need to take variance into account
case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y
case (x, y) => (x eq NoManifest) && (y eq NoManifest)
}
/** Tests whether the type represented by this manifest is a subtype
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
@deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def <:<(that: ClassManifest[_]): Boolean = {
// All types which could conform to these types will override <:<.
def cannotMatch = {
import Manifest._
that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null)
}
// This is wrong, and I don't know how it can be made right
// without more development of Manifests, due to arity-defying
// relationships like:
//
// List[String] <: AnyRef
// Map[Int, Int] <: Iterable[(Int, Int)]
//
// Given the manifest for Map[A, B] how do I determine that a
// supertype has single type argument (A, B) ? I don't see how we
// can say whether X <:< Y when type arguments are involved except
// when the erasure is the same, even before considering variance.
!cannotMatch && {
// this part is wrong for not considering variance
if (this.runtimeClass == that.runtimeClass)
subargs(this.typeArguments, that.typeArguments)
// this part is wrong for punting unless the rhs has no type
// arguments, but it's better than a blindfolded pinata swing.
else
that.typeArguments.isEmpty && subtype(this.runtimeClass, that.runtimeClass)
}
}
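  // Illustrative sketch of the limitation above (an assumption, not part of the
  // original source). Given:
  //   import ClassManifestFactory._
  //   val mapCM  = classType[Map[Int, Int]](classOf[Map[_, _]], Int, Int)
  //   val iterCM = classType[Iterable[(Int, Int)]](classOf[Iterable[_]],
  //                  classType[(Int, Int)](classOf[(Int, Int)], Int, Int))
  // mapCM <:< iterCM is false even though Map[Int, Int] <: Iterable[(Int, Int)]:
  // the erasures differ and the right-hand side carries type arguments, so the
  // check above punts rather than attempting a variance-aware comparison.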
/** Tests whether the type represented by this manifest is a supertype
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
@deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
override def canEqual(other: Any) = other match {
case _: ClassManifest[_] => true
case _ => false
}
protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
@deprecated("use wrap instead", "2.10.0")
def arrayManifest: ClassManifest[Array[T]] =
ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this)
override def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]]
@deprecated("use wrap.newArray instead", "2.10.0")
def newArray2(len: Int): Array[Array[T]] =
java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len)
.asInstanceOf[Array[Array[T]]]
@deprecated("use wrap.wrap.newArray instead", "2.10.0")
def newArray3(len: Int): Array[Array[Array[T]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](runtimeClass)), len)
.asInstanceOf[Array[Array[Array[T]]]]
@deprecated("use wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass))), len)
.asInstanceOf[Array[Array[Array[Array[T]]]]]
@deprecated("use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass)))), len)
.asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
@deprecated("create WrappedArray directly instead", "2.10.0")
def newWrappedArray(len: Int): WrappedArray[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
@deprecated("use ArrayBuilder.make(this) instead", "2.10.0")
def newArrayBuilder(): ArrayBuilder[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
@deprecated("use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
def typeArguments: List[OptManifest[_]] = List()
protected def argString =
if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]")
else if (runtimeClass.isArray) "["+ClassManifest.fromClass(runtimeClass.getComponentType)+"]"
else ""
}
/** `ClassManifestFactory` defines factory methods for manifests.
* It is intended for use by the compiler and should not be used in client code.
*
* Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning.
* This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
*
 * In a perfect world, we would just remove the @deprecated annotation from the `ClassManifest` object
 * and then delete it in 2.11. After all, that object is explicitly marked as internal, so no one should use it.
 * However, a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`,
 * so we need to somehow nudge them into migrating prior to removing stuff out of the blue.
* Hence we've introduced this design decision as the lesser of two evils.
*/
object ClassManifestFactory {
val Byte = ManifestFactory.Byte
val Short = ManifestFactory.Short
val Char = ManifestFactory.Char
val Int = ManifestFactory.Int
val Long = ManifestFactory.Long
val Float = ManifestFactory.Float
val Double = ManifestFactory.Double
val Boolean = ManifestFactory.Boolean
val Unit = ManifestFactory.Unit
val Any = ManifestFactory.Any
val Object = ManifestFactory.Object
val AnyVal = ManifestFactory.AnyVal
val Nothing = ManifestFactory.Nothing
val Null = ManifestFactory.Null
def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match {
case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]]
case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]]
case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]]
case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]]
case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]]
case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]]
case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]]
case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]]
case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]]
}
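  // Usage sketch (an assumption, not part of the original source):
  //   fromClass(java.lang.Integer.TYPE) // yields the Int manifest for primitive int
  //   fromClass(classOf[String])        // yields a ClassManifest[String] via classType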
def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
/** ClassManifest for the class type `clazz`, where `clazz` is
* a top-level or static class.
 * @note This no-prefix, no-arguments case is separate because
 *       it's called from ScalaRunTime.boxArray itself. If we
* pass varargs as arrays into this, we get an infinitely recursive call
* to boxArray. (Besides, having a separate case is more efficient)
*/
def classType[T](clazz: jClass[_]): ClassManifest[T] =
new ClassTypeManifest[T](None, clazz, Nil)
/** ClassManifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class and `args` are its type arguments */
def classType[T](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
/** ClassManifest for the class type `clazz[args]`, where `clazz` is
* a class with non-package prefix type `prefix` and type arguments `args`.
*/
def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
}
@SerialVersionUID(1L)
private class AbstractTypeClassManifest[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*) extends ClassManifest[T] {
override def runtimeClass = clazz
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
/** ClassManifest for the abstract type `prefix # name`. `clazz` (the erasure)
 * is not strictly necessary as it could be obtained by reflection. It was
 * added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
new AbstractTypeClassManifest(prefix, name, clazz)
/** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection.
* todo: remove after next bootstrap
*/
def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new AbstractTypeClassManifest(prefix, name, upperbound.runtimeClass)
}
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class */
@SerialVersionUID(1L)
private class ClassTypeManifest[T](
prefix: Option[OptManifest[_]],
val runtimeClass: jClass[_],
override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
{
override def toString =
(if (prefix.isEmpty) "" else prefix.get.toString+"#") +
(if (runtimeClass.isArray) "Array" else runtimeClass.getName) +
argString
}
| slothspot/scala | src/library/scala/reflect/ClassManifestDeprecatedApis.scala | Scala | bsd-3-clause | 11,700 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalBoolean, Input}
case class CPQ20(value: Option[Boolean]) extends CtBoxIdentifier(name = "Is the company claiming to carry losses of this period back to an earlier period") with CtOptionalBoolean with Input
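// Illustrative only (not from the original source): CPQ20(Some(true)) models a claim
// to carry losses back, CPQ20(None) an unanswered question.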
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CPQ20.scala | Scala | apache-2.0 | 903 |
package com.softwaremill.play24.modules
import com.softwaremill.play24.dao.{CoffeeDao, SupplierDao}
import org.specs2.mock.Mockito
trait MockDaoModule extends Mockito {
lazy val coffeeDoa = mock[CoffeeDao]
lazy val supplierDoa = mock[SupplierDao]
}
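// Usage sketch (an assumption, not part of the original source): a Specs2
// specification can mix this trait in to obtain Mockito-backed DAO stubs, e.g.
//   class CoffeeControllerSpec extends org.specs2.mutable.Specification with MockDaoModule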
| guersam/macwire | examples/play24/test/com/softwaremill/play24/modules/MockDaoModule.scala | Scala | apache-2.0 | 255 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.qsu
import slamdata.Predef._
import quasar.{Qspec, TreeMatchers}
import quasar.ejson.{EJson, Fixed}
import quasar.fp._
import quasar.qscript.{construction, MapFuncsCore}
import quasar.qscript.provenance.JoinKeys
import quasar.Planner.PlannerError
import matryoshka.data.Fix
import pathy.Path
import pathy.Path.Sandboxed
import scalaz.{EitherT, INil, Need, StateT}
object ReifyAutoJoinSpecs extends Qspec with TreeMatchers with QSUTTypes[Fix] {
import QSUGraph.Extractors._
import ApplyProvenance.AuthenticatedQSU
type F[A] = EitherT[StateT[Need, Long, ?], PlannerError, A]
val qsu = QScriptUniform.DslT[Fix]
val func = construction.Func[Fix]
val J = Fixed[Fix[EJson]]
val afile1 = Path.rootDir[Sandboxed] </> Path.file("afile")
val afile2 = Path.rootDir[Sandboxed] </> Path.file("afile2")
val afile3 = Path.rootDir[Sandboxed] </> Path.file("afile3")
"autojoin reification" >> {
"reify an autojoin2" >> {
val qgraph = QSUGraph.fromTree[Fix](
qsu.autojoin2((
qsu.map(
qsu.read(afile1),
func.ProjectKeyS(func.Hole, "foo")),
qsu.map(
qsu.read(afile2),
func.ProjectKeyS(func.Hole, "bar")),
_(MapFuncsCore.Add(_, _)))))
runOn(qgraph) must beLike {
case QSAutoJoin(
Map(Read(`afile1`), fmL),
Map(Read(`afile2`), fmR),
JoinKeys(INil()),
fmCombiner) =>
fmL must beTreeEqual(
func.ProjectKeyS(func.Hole, "foo"))
fmR must beTreeEqual(
func.ProjectKeyS(func.Hole, "bar"))
fmCombiner must beTreeEqual(
func.Add(func.LeftSide, func.RightSide))
}
}
"reify an autojoin3" >> {
val qgraph = QSUGraph.fromTree[Fix](
qsu._autojoin3((
qsu.map(
qsu.read(afile1),
func.ProjectKeyS(func.Hole, "foo")),
qsu.map(
qsu.read(afile2),
func.ProjectKeyS(func.Hole, "bar")),
qsu.map(
qsu.read(afile3),
func.ProjectKeyS(func.Hole, "baz")),
func.Subtract(func.Add(func.LeftSide3, func.RightSide3), func.Center))))
runOn(qgraph) must beLike {
case QSAutoJoin(
QSAutoJoin(
Map(Read(`afile1`), fmL),
Map(Read(`afile2`), fmC),
JoinKeys(INil()),
fmInner),
Map(Read(`afile3`), fmR),
JoinKeys(INil()),
fmOuter) =>
fmL must beTreeEqual(
func.ProjectKeyS(func.Hole, "foo"))
fmC must beTreeEqual(
func.ProjectKeyS(func.Hole, "bar"))
fmR must beTreeEqual(
func.ProjectKeyS(func.Hole, "baz"))
fmInner must beTreeEqual(
func.ConcatMaps(
func.MakeMap(func.Constant(J.str("leftAccess1")), func.LeftSide),
func.MakeMap(func.Constant(J.str("centerAccess2")), func.RightSide)))
fmOuter must beTreeEqual(
func.Subtract(
func.Add(
func.ProjectKey(func.LeftSide, func.Constant(J.str("leftAccess1"))),
func.RightSide),
func.ProjectKey(func.LeftSide, func.Constant(J.str("centerAccess2")))))
}
}
}
def runOn(qgraph: QSUGraph): QSUGraph =
runOn_(qgraph).graph
def runOn_(qgraph: QSUGraph): AuthenticatedQSU[Fix] = {
val resultsF = for {
prov <- ApplyProvenance[Fix, F](qgraph)
back <- ReifyAutoJoins[Fix, F](prov)
} yield back
val results = resultsF.run.eval(0L).value.toEither
results must beRight
results.right.get
}
}
| jedesah/Quasar | connector/src/test/scala/quasar/qscript/qsu/ReifyAutoJoinsSpec.scala | Scala | apache-2.0 | 4,238 |
/*
* MOIS: Ordinary Differential Equations (Compat)
* Copyright (C) 2014 University of Edinburgh School of Informatics
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.ac.ed.inf.mois.ode
import uk.ac.ed.inf.mois.Process
abstract class ODE[T, D] extends Process with ODEBase[T, D] with ODESyntax[T, D]
| edinburgh-rbm/mois | src/main/scala/uk/ac/ed/inf/mois/ode/Compat.scala | Scala | gpl-3.0 | 932 |
sealed trait Option[+A]
case object None extends Option[Nothing]
case class Some[A](a: A) extends Option[A] | hmemcpy/milewski-ctfp-pdf | src/content/3.14/code/scala/snippet04.scala | Scala | gpl-3.0 | 107 |
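// A minimal usage sketch (not part of the original snippet):
//   def getOrElse[A](opt: Option[A], default: A): A = opt match {
//     case Some(a) => a
//     case None    => default
//   }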