code (string, 5 to 1M chars) | repo_name (string, 5 to 109 chars) | path (string, 6 to 208 chars) | language (1 class) | license (15 classes) | size (int64, 5 to 1M)
---|---|---|---|---|---
package example.project.main
trait ClearingHouse
case object LchScm extends ClearingHouse
case object LchFcm extends ClearingHouse | agmenc/doc-jockey | src/test/scala/example/project/main/ClearingHouse.scala | Scala | mit | 132 |
/**
* Created by faganp on 3/19/15.
*/
import java.util.Calendar
import com.typesafe.config.ConfigFactory
//import org.apache.spark.sql.catalyst.types.{StringType, StructField, StructType} // spark 1.2 codeline
//import java.util.Properties
//import _root_.kafka.producer.Producer
//import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}
//import org.apache.spark.serializer.KryoSerializer
//import org.elasticsearch.spark.rdd.EsSpark
//import org.apache.spark.sql.SQLContext
//import org.apache.spark.streaming.kafka.KafkaUtils
import java.text.SimpleDateFormat
//import org.elasticsearch.spark._
//import org.apache.spark.SparkContext._
//import org.apache.spark.sql._
//import org.elasticsearch.spark.sql._
import com.google.common.net.InetAddresses
//import util.Properties
/**
* Created by faganpe on 17/03/15.
*/
object RandomNetflowGenBowen extends Serializable {
def getIPGenRand(randNum: Int): String = {
// val r = scala.util.Random
if (randNum % 2 == 0) getIPRand()
else getIPAddressSkew("132.146.5")
// getIPAddressSkew("132.146.5")
}
//
// /* End of the random generation values used to influence the data that is produced */
//
def getIPAddressSkew(IPSubnet: String): String = {
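// Fill in any missing octets of the supplied subnet prefix with random values, e.g. "132.146.5" becomes "132.146.5.<0-254>"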
val r = scala.util.Random
val dotCount = IPSubnet.count(_ == '.') // let's count the number of dots
if (dotCount == 3) IPSubnet // return the complete IP address without making anything up
else if (dotCount == 2) IPSubnet + "." + r.nextInt(255)
else if (dotCount == 1) IPSubnet + "." + r.nextInt(255) + "." + r.nextInt(255)
else IPSubnet // otherwise just return the original ip string
}
def getIPRand(): String = {
val r = scala.util.Random
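// Guava's InetAddresses.fromInteger maps the random 32-bit int to a dotted-quad IPv4 address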
InetAddresses.fromInteger(r.nextInt()).getHostAddress()
}
// randNum limits the range of the random integer returned, i.e. if 100 is passed in, the returned number is in the range 0 to 99
def randNum(ranNum: Int): Int = {
val r = scala.util.Random
r.nextInt(ranNum)
}
/* Start of the random generation values used to influence the data that is produced */
val r = scala.util.Random
def main(args: Array[String]) {
val conf = ConfigFactory.load()
val appName = conf.getString("netflow-app.name")
// val appRandomDistributionMin = conf.getInt("netflow-app.randomDistributionMin")
// val appRandomDistributionMax = conf.getInt("netflow-app.randomDistributionMax")
println("The application name is: " + appName)
if (args.length != 5) {
System.err.println("Usage: " + "hdfs://quickstart.cloudera:8020/user/cloudera/randomNetflow <numRecords> <numFilesPerDir> <numDirectories> <CountryEnrichment>")
System.err.println("Example: " + "hdfs://quickstart.cloudera:8020/user/cloudera/randomNetflow 30000000 4 10 true")
System.exit(1)
}
else {
println("Supplied arguments to the program are : " + args(0).toString + " " + args(1).toInt + " " + args(2).toInt + " " + args(3) + " " + args(4))
}
val format = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss")
// val hdfsPartitionDir = format.format(Calendar.getInstance().getTime())
// setup Spark
val sparkConf = new SparkConf()
// sparkConf.setMaster("local[4]")
sparkConf.setMaster("spark://vm-cluster-node2:7077")
// sparkConf.setMaster("spark://192.168.56.102:7077")
// sparkConf.setMaster("spark://79d4dd97b170:7077")
sparkConf.set("spark.executor.memory", "256m")
sparkConf.set("spark.driver.memory", "256m")
sparkConf.set("spark.cores.max", "1")
// sparkConf.set("spark.worker.cleanup.enabled", "true")
// sparkConf.set("spark.worker.cleanup.interval", "1")
// sparkConf.set("spark.worker.cleanup.appDataTtl", "30")
/* Change to Kyro Serialization */
// sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
// Now it's 24 Mb of buffer by default instead of 0.064 Mb
// sparkConf.set("spark.kryoserializer.buffer.mb","24")
/*
https://ogirardot.wordpress.com/2015/01/09/changing-sparks-default-java-serialization-to-kryo/
spark.kryoserializer.buffer.max.mb (64 Mb by default) : useful if your default buffer size goes further than 64 Mb;
spark.kryo.referenceTracking (true by default) : c.f. reference tracking in Kryo
spark.kryo.registrationRequired (false by default) : Kryo’s parameter to define if all serializable classes must be registered
spark.kryo.classesToRegister (empty string list by default) : you can add a list of the qualified names of all classes that must be registered (c.f. last parameter)
*/
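/* Illustrative sketch only, kept commented out like the Kryo settings above: how the options
   described at the link might be applied here. The registered classes are placeholders, not
   necessarily the types this generator actually serializes. */
// sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
// sparkConf.set("spark.kryo.registrationRequired", "true")
// sparkConf.registerKryoClasses(Array(classOf[Array[String]], classOf[scala.collection.immutable.Range]))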
sparkConf.setAppName("randomNetflowGen")
// Below line is the hostname or IP address for the driver to listen on. This is used for communicating with the executors and the standalone Master.
sparkConf.set("spark.driver.host", "192.168.56.1")
sparkConf.set("spark.hadoop.validateOutputSpecs", "false") // overwrite hdfs files which are written
val jars = Array("C:\\Users\\801762473\\.m2\\repository\\org\\apache\\spark\\spark-streaming-kafka_2.10\\1.3.0-cdh5.4.5\\spark-streaming-kafka_2.10-1.3.0-cdh5.4.5.jar",
"C:\\Users\\801762473\\.m2\\repository\\org\\apache\\kafka\\kafka_2.10\\0.8.0\\kafka_2.10-0.8.0.jar",
"C:\\Users\\801762473\\.m2\\repository\\org\\apache\\spark\\spark-core_2.10\\1.3.0-cdh5.4.5\\spark-core_2.10-1.3.0-cdh5.4.5.jar",
"C:\\Users\\801762473\\.m2\\repository\\com\\101tec\\zkclient\\0.3\\zkclient-0.3.jar",
"C:\\Users\\801762473\\.m2\\repository\\com\\yammer\\metrics\\metrics-core\\2.2.0\\metrics-core-2.2.0.jar",
"C:\\Users\\801762473\\.m2\\repository\\com\\esotericsoftware\\kryo\\kryo\\2.21\\kryo-2.21.jar",
"C:\\Users\\801762473\\.m2\\repository\\org\\elasticsearch\\elasticsearch-spark_2.10\\2.1.0.Beta3\\elasticsearch-spark_2.10-2.1.0.Beta3.jar",
"C:\\Users\\801762473\\.m2\\repository\\com\\maxmind\\db\\maxmind-db\\1.0.0\\maxmind-db-1.0.0.jar",
"C:\\Users\\801762473\\.m2\\repository\\com\\maxmind\\geoip2\\geoip2\\2.1.0\\geoip2-2.1.0.jar",
"C:\\Users\\801762473\\.m2\\repository\\org\\apache\\spark\\spark-hive_2.10\\1.3.0-cdh5.4.5\\spark-hive_2.10-1.3.0-cdh5.4.5.jar",
"D:\\Bowen_Raw_Source\\IntelijProjects\\KafkaStreamingPOC\\target\\netflow-streaming-0.0.1-SNAPSHOT-jar-with-dependencies.jar")
//
sparkConf.setJars(jars)
// val ssc = new StreamingContext(sparkConf, Seconds(120))
val sc = new SparkContext(sparkConf)
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
// val numRecords: Int = 30000000
// val numRecords: Int = args(1).toInt
// val partitions: Int = args(2).toInt
val hdfsURI = args(0).toString
println("The application hdfsURI is: " + hdfsURI)
val numDirectories = args(3).toInt
for (dirNum <- 1 to numDirectories) {
val appRandomDistributionMin = conf.getInt("netflow-app.randomDistributionMin")
val appRandomDistributionMax = conf.getInt("netflow-app.randomDistributionMax")
val numPartitions = args(2).toInt
val countryEnrichment = args(4).toBoolean
/* Start of working out if we need to randomize or not */
val recordsPerPartition = {
if (appRandomDistributionMin == 0 && appRandomDistributionMax == 0) {
// no randomness to the number of netflow records
// we are assuming here that numRecords is divisible by partitions, otherwise we need to compensate for the residual
println("Using the standard number of lines per partition of " + args(1).toInt)
args(1).toInt
// BigInt(args(1))
}
else {
val tempRecordsPerPartition = randNum(appRandomDistributionMax - appRandomDistributionMin) + appRandomDistributionMin
println("Using the randomized number of lines per partition of " + tempRecordsPerPartition)
tempRecordsPerPartition
}
}
// val seedRdd = sc.parallelize(Seq[String](), numPartitions).mapPartitions { _ => {
val broadcastVar = sc.broadcast("Hi Paul")
val seedRdd = sc.parallelize(Seq[String](), numPartitions).mapPartitions { x => {
// (1 to recordsPerPartition).map { _ =>
(1 to recordsPerPartition).map { x =>
val r = scala.util.Random
val currentTimeForDirPart = Calendar.getInstance().getTime()
//
// // start of define hours and mins and maybe secs here
val formatDateDayForDir = new SimpleDateFormat("yyyy-MM-dd")
val formatDateHourForDir = new SimpleDateFormat("HH")
val formatDateMinuteForDir = new SimpleDateFormat("mm")
val formatDateSecondForDir = new SimpleDateFormat("ss")
val formatDateMilliSecondForDir = new SimpleDateFormat("SSS")
val flowDay = formatDateDayForDir.format(currentTimeForDirPart)
val flowHour = formatDateHourForDir.format(currentTimeForDirPart)
val flowMinute = formatDateMinuteForDir.format(currentTimeForDirPart)
val flowSecond = formatDateSecondForDir.format(currentTimeForDirPart)
val flowMilliSecond = formatDateMilliSecondForDir.format(currentTimeForDirPart)
// // end of define hours and mins and maybe secs here
//
// // start of maps
// // start of maps
val event_id = Map( 0 -> "ec00761e-1629-4e64-afb5-8e9f5a625a59#4055",
1 -> "ec00761e-1629-4e64-afb5-8e9f5a625a59#4087",
2 -> "14969dea-2cc1-4498-97ce-df198927e868#7365",
3 -> "f666f69e-d2b8-4ff0-aaaa-4c1284b08a89#238",
4 -> "14969dea-2cc1-4498-97ce-df198927e868#7402")
val sensor_id = Map(0 -> 22, 1 -> 73, 2 -> 46, 3 -> 73, 4 -> 73)
val ts = Map(0 -> "2015-08-19 20:56:41",
1 -> "2015-08-19 20:56:42",
2 -> "2015-08-19 20:55:57",
3 -> "2015-08-19 20:56:02",
4 -> "2015-08-19 20:55:57")
val te = Map(0 -> "2015-08-19 20:56:41",
1 -> "2015-08-19 20:56:42",
2 -> "2015-08-19 20:55:57",
3 -> "2015-08-19 20:56:02",
4 -> "2015-08-19 20:55:57")
val src_ip = Map(0 -> "147.150.17.77",
1 -> "147.151.1.240",
2 -> "74.125.0.57",
3 -> "147.149.12.168",
4 -> "147.150.19.100")
val src_ip_long = Map(0 -> 1369706041,
1 -> 1149706041,
2 -> 1249706041,
3 -> 1149406041,
4 -> 1245705041)
val src_port = Map(0 -> 18490, 1 -> 3287, 2 -> 139, 3 -> 55348, 4 -> 22)
val dst_ip = Map(0 -> "82.146.43.41",
1 -> "74.125.0.83",
2 -> "147.149.5.19",
3 -> "61.158.247.74",
4 -> "82.146.43.137")
val dst_ip_long = Map(0 -> 1385311017,
1 -> 1249706067,
2 -> 1149406041,
3 -> 1033828170,
4 -> 1385311113)
val dst_port = Map(0 -> 139, 1 -> 139, 2 -> 24167, 3 -> 443, 4 -> 61686)
val protocol = Map(0 -> "6", 1 -> "6", 2 -> "6", 3 -> "6", 4 -> "6")
val ip_version = Map(0 -> 4, 1 -> 4, 2 -> 4, 3 -> 4, 4 -> 4)
val packets = Map(0 -> 8, 1 -> 8, 2 -> 7, 3 -> 1, 4 -> 1)
val bytes = Map(0 -> 1241, 1 -> 1239, 2 -> 1184, 3 -> 66, 4 -> 66)
val tcp_flag = Map(0 -> 11, 1 -> 11, 2 -> 11, 3 -> 0, 4 -> 0)
val tos = Map(0 -> 0, 1 -> 0, 2 -> 0, 3 -> 0, 4 -> 0)
val traffic_fragmented = Map(0 -> 0, 1 -> 0, 2 -> 0, 3 -> 0, 4 -> 0)
val sensor_site = Map(0 -> "Alpha Roads Site1",
1 -> "Delta Postal Site1",
2 -> "Charlie Water Site1",
3 -> "Delta Postal Site1",
4 -> "Delta Postal Site1")
val sensor_org_name = Map(0 -> "Alpha Roads",
1 -> "Delta Postal",
2 -> "Charlie Water",
3 -> "Delta Postal",
4 -> "Delta Postal")
val sensor_org_sector = Map(0 -> "Transport",
1 -> "Communications",
2 -> "Water",
3 -> "Communications",
4 -> "Communications")
val sensor_org_type = Map(0 -> "CNI", 1 -> "CNI", 2 -> "CNI", 3 -> "CNI", 4 -> "CNI")
val sensor_priority = Map(0 -> 3, 1 -> 2, 2 -> 2, 3 -> 2, 4 -> 2)
val sensor_country = Map(0 -> "UK", 1 -> "UK", 2 -> "UK", 3 -> "UK", 4 -> "UK")
val sensor_db = Map(0 -> "Mock_sensor_db-01",
1 -> "Mock_sensor_db-01",
2 -> "Mock_sensor_db-01",
3 -> "Mock_sensor_db-01",
4 -> "Mock_sensor_db-01")
val geoip_src_country = Map(0 -> "United Kingdom",
1 -> "United Kingdom",
2 -> "United States",
3 -> "United Kingdom",
4 -> "United Kingdom")
// Not needed for ""S
// val geoip_src_subdivisions = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val geoip_src_city = Map(0 -> "", 1 -> "", 2 -> "Mountain View", 3 -> "", 4 -> "")
val geoip_src_lat = Map(0 -> 51.5,
1 -> 51.5,
2 -> 37.419200000000004,
3 -> 51.5,
4 -> 51.5)
val geoip_src_long = Map(0 -> -0.13, 1 -> -0.13, 2 -> -122.0574, 3 -> -0.13, 4 -> -0.13)
val geoip_src_isp_org = Map(0 -> "BT", 1 -> "BT", 2 -> "Google", 3 -> "BT", 4 -> "BT")
val geoip_src_as = Map(0 -> 2856, 1 -> 2856, 2 -> 15169, 3 -> 2856, 4 -> 2856)
val geoip_src_as_org = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val geoip_dst_country = Map(0 -> "Russia",
1 -> "United States",
2 -> "United Kingdom",
3 -> "China",
4 -> "Russia")
val geoip_dst_subdivisions = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val geoip_dst_city = Map(0 -> "Moscow",
1 -> "Mountain View",
2 -> "",
3 -> "Zhengzhou",
4 -> "Moscow")
val geoip_dst_lat = Map(0 -> 55.752200000000002,
1 -> 37.419200000000004,
2 -> 51.5,
3 -> 34.683599999999998,
4 -> 55.752200000000002)
val geoip_dst_long = Map(0 -> 37.615600000000001,
1 -> -122.0574,
2 -> -0.13,
3 -> 113.5325,
4 -> 37.615600000000001)
val geoip_dst_isp_org = Map(0 -> "ISPsystem, cjsc",
1 -> "Google",
2 -> "BT",
3 -> "China Unicom Liaoning",
4 -> "ISPsystem, cjsc")
val geoip_dst_as = Map(0 -> 29182, 1 -> 15169, 2 -> 2856, 3 -> 4837, 4 -> 29182)
val geoip_dst_as_org = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val geoip_db = Map(0 -> "2", 1 -> "2", 2 -> "2", 3 -> "2", 4 -> "2")
val port_src_well_known_service = Map(0 -> "",
1 -> "directvdata",
2 -> "netbios-ssn",
3 -> "",
4 -> "ssh")
val port_dst_well_known_service = Map(0 -> "netbios-ssn",
1 -> "netbios-ssn",
2 -> "",
3 -> "https",
4 -> "")
val service_db = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_site = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_org_name = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_org_sector = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_org_type = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_priority = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_src_country = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_site = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_org_name = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_org_sector = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_org_type = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_priority = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_dst_country = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val asset_db = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_src_type = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_src_attacker = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_src_malware = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_src_campaign = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_src_infrastructure = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_dst_type = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_dst_attacker = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_dst_malware = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_dst_campaign = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_dst_infrastructure = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val threat_db = Map(0 -> "MOCK_THREAT_DB_0.3",
1 -> "MOCK_THREAT_DB_0.3",
2 -> "MOCK_THREAT_DB_0.3",
3 -> "MOCK_THREAT_DB_0.3",
4 -> "MOCK_THREAT_DB_0.3")
val dredge_id = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val dredge_updated_fields = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val dredge_date = Map(0 -> "", 1 -> "", 2 -> "", 3 -> "", 4 -> "")
val yyyy = Map(0 -> 2015, 1 -> 2015, 2 -> 2015, 3 -> 2015, 4 -> 2015)
val mm = Map(0 -> 8, 1 -> 8, 2 -> 8, 3 -> 8, 4 -> 8)
val dd = Map(0 -> 19, 1 -> 19, 2 -> 19, 3 -> 19, 4 -> 19)
// // end of maps
//
val formatDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS")
val formatDateDuration = new SimpleDateFormat("ss.SSSSSS")
val formatDateDay = new SimpleDateFormat("yyyy-MM-dd")
val formatDateHour = new SimpleDateFormat("HH")
//
// // get the current time for flowDuration so we get variability
val currentTime = Calendar.getInstance().getTime()
//
val flowTimestamp = formatDate.format(currentTimeForDirPart)
// val flowDay = formatDateDay.format(currentTimeForDirPart)
// val flowHour = formatDateHour.format(currentTimeForDirPart)
val flowDuration = formatDateDuration.format(currentTime)
// val SourceIPString = InetAddresses.fromInteger(r.nextInt()).getHostAddress()
val SourceIPString = getIPGenRand(r.nextInt())
val DestIPString = InetAddresses.fromInteger(r.nextInt()).getHostAddress()
//
""
// if (countryEnrichment) {
// flowTimestamp + "," + flowDuration + "," + protoMap(r.nextInt(5)) + "," +
// SourceIPString + "," + flowDirMap(r.nextInt(6)) + "," + DestIPString + "," +
// r.nextInt(65535) + "," + flowStatMap(r.nextInt(11)) + "," + sTosMap(r.nextInt(3)) +
// "," + dTosMap(r.nextInt(4)) + "," + totPktsMap(r.nextInt(2)) + "," +
// totBytesMap(r.nextInt(1)) + "," + labelMap(r.nextInt(9)) +
// "," + MaxMindSingleton.getInstance().getCountry(SourceIPString)
// }
// else {
// event_id(r.nextInt(4)) + "," + flowDuration + "," + protoMap(r.nextInt(5)) + "," +
// SourceIPString + "," + flowDirMap(r.nextInt(6)) + "," + DestIPString + "," +
// r.nextInt(65535) + "," + flowStatMap(r.nextInt(11)) + "," + sTosMap(r.nextInt(3)) +
// "," + dTosMap(r.nextInt(4)) + "," + totPktsMap(r.nextInt(2)) + "," +
// totBytesMap(r.nextInt(1)) + "," + labelMap(r.nextInt(9))
// }
}
}.iterator
}
/* End of working out if we need to randomize or not */
seedRdd.saveAsTextFile(hdfsURI + "/" + "runNum=" + dirNum)
// seedRdd.saveAsTextFile("randNetflow" + "/" + "runNum=" + dirNum)
}
}
}
// end of object
/* End of new code */
| faganpe/KafkaStreamingPOC | src/main/scala/RandomNetflowGenBowen.scala | Scala | apache-2.0 | 20,572 |
package org.jetbrains.sbt.language.utils
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.{Key, ModificationTracker}
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.codeInspection.collections.isSeq
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.api.base.literals.ScStringLiteral
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, ScInfixExpr, ScParenthesisedExpr, ScReferenceExpression}
import org.jetbrains.plugins.scala.macroAnnotations.Cached
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel
import org.jetbrains.sbt.language.completion.SbtScalacOptionsCompletionContributor
import org.jetbrains.sbt.language.utils.SbtScalacOptionInfo.ArgType
import org.slf4j.LoggerFactory
import spray.json.DefaultJsonProtocol._
import spray.json._
import scala.annotation.tailrec
import scala.io.Source
import scala.util.{Failure, Success, Using}
object SbtScalacOptionUtils {
private val log = LoggerFactory.getLogger(getClass)
val SCALAC_OPTIONS = "scalacOptions"
val SCALAC_OPTIONS_DOC_KEY: Key[String] = Key.create("SCALAC_OPTION_DOC")
val SEQ_OPS = Set("++=", "--=", ":=")
val SINGLE_OPS = Set("+=", "-=")
def projectVersions(project: Project): List[ScalaLanguageLevel] =
if (ApplicationManager.getApplication.isUnitTestMode)
List(ScalaLanguageLevel.getDefault)
else
SbtDependencyUtils.getAllScalaVers(project).flatMap(ScalaLanguageLevel.findByVersion)
def projectVersionsSorted(project: Project, reverse: Boolean): List[ScalaLanguageLevel] = {
val ordering = implicitly[Ordering[ScalaLanguageLevel]]
projectVersions(project).distinct.sorted(if (reverse) ordering.reverse else ordering)
}
@tailrec
def matchesScalacOptionsSbtSetting(expr: ScExpression): Boolean = expr match {
case ref: ScReferenceExpression => ref.refName == SCALAC_OPTIONS
// e.g.: ThisBuild / scalacOptions
case ScInfixExpr(_, op, right: ScReferenceExpression) =>
op.refName == "/" && right.refName == SCALAC_OPTIONS
case ScParenthesisedExpr(e) => matchesScalacOptionsSbtSetting(e)
case _ => false
}
def withScalacOption[T](element: PsiElement)(onMismatch: => T, onMatch: ScStringLiteral => T): T =
element.getParent match {
case str: ScStringLiteral if isScalacOption(str) =>
onMatch(str)
case _ => onMismatch
}
def isScalacOption(str: ScStringLiteral): Boolean = isScalacOptionInternal(str)
def isScalacOption(ref: ScReferenceExpression): Boolean = isScalacOptionInternal(ref)
def getScalacOptionsSbtSettingParent(element: PsiElement): Option[ScInfixExpr] =
element.contexts.collectFirst {
case expr: ScInfixExpr if matchesScalacOptionsSbtSetting(expr.left) &&
(if (isSeq(expr.right)) SEQ_OPS(expr.operation.refName) else SINGLE_OPS(expr.operation.refName)) =>
expr
}
private def isScalacOptionInternal(element: PsiElement): Boolean =
getScalacOptionsSbtSettingParent(element).isDefined
@Cached(ModificationTracker.NEVER_CHANGED, null)
def scalacOptionsByFlag: Map[String, Seq[SbtScalacOptionInfo]] =
getScalacOptions.groupBy(_.flag)
@Cached(ModificationTracker.NEVER_CHANGED, null)
private def scalacOptionFlagsWithPrefix: Seq[(String, String)] = getScalacOptions.collect {
case SbtScalacOptionInfo(flag, _, _, ArgType.OneAfterPrefix(prefix), _, _) =>
prefix -> flag
}
@Cached(ModificationTracker.NEVER_CHANGED, null)
def getScalacOptions: Seq[SbtScalacOptionInfo] = {
if (ApplicationManager.getApplication.isUnitTestMode) return scalacOptionsForUnitTests
def scalacOptionsSource = {
val completionContributorClass = SbtScalacOptionsCompletionContributor.getClass
val inputStream = completionContributorClass.getResourceAsStream("scalac-options.json")
Source.fromInputStream(inputStream)
}
val options = Using(scalacOptionsSource) { src =>
src
.mkString
.parseJson
.convertTo[Seq[SbtScalacOptionInfo]]
}
options match {
case Success(value) => value
case Failure(exception) =>
log.error("Could not load scalac options", exception)
Seq.empty
}
}
def getScalacOptionsForLiteralValue(str: ScStringLiteral): Seq[SbtScalacOptionInfo] =
Option(str.getValue).filter(_.startsWith("-")).toSeq.flatMap { value =>
def prefixed: Seq[SbtScalacOptionInfo] =
scalacOptionFlagsWithPrefix
.collect { case (prefix, flag) if value.startsWith(prefix) => flag }
.flatMap(scalacOptionsByFlag.getOrElse(_, Seq.empty))
scalacOptionsByFlag.getOrElse(value.split(":", 2).head, prefixed)
}
private def scalacOptionsForUnitTests: Seq[SbtScalacOptionInfo] = {
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel._
import org.jetbrains.sbt.language.utils.SbtScalacOptionInfo.ArgType
val versions = Set(Scala_2_11, Scala_2_12, Scala_2_13, Scala_3_0, Scala_3_1)
Seq(
SbtScalacOptionInfo(
flag = "-deprecation",
descriptions = Map(
Scala_2_11 -> "Emit warning and location for usages of deprecated APIs.",
Scala_2_12 -> "Emit warning and location for usages of deprecated APIs. See also -Wconf. [false]",
Scala_2_13 -> "Emit warning and location for usages of deprecated APIs. See also -Wconf. [false]",
Scala_3_0 -> "Emit warning and location for usages of deprecated APIs.",
Scala_3_1 -> "Emit warning and location for usages of deprecated APIs.",
),
choices = Map.empty,
argType = ArgType.No,
scalaVersions = versions,
defaultValue = None,
),
SbtScalacOptionInfo(
flag = "-classpath",
descriptions = versions.map(_ -> "Specify where to find user class files.").toMap,
choices = Map.empty,
argType = ArgType.OneSeparate,
scalaVersions = versions,
defaultValue = Some("."),
),
SbtScalacOptionInfo(
flag = "-bootclasspath",
descriptions = versions.map(_ -> "Override location of bootstrap class files.").toMap,
choices = Map.empty,
argType = ArgType.OneSeparate,
scalaVersions = versions,
defaultValue = None,
),
SbtScalacOptionInfo(
flag = "-Ydump-classes",
descriptions = versions.map(_ -> "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).").toMap,
choices = Map.empty,
argType = ArgType.OneSeparate,
scalaVersions = versions,
defaultValue = None,
),
SbtScalacOptionInfo(
flag = "-Yno-generic-signatures",
descriptions = Map(
Scala_2_11 -> "Suppress generation of generic signatures for Java.",
Scala_2_12 -> "Suppress generation of generic signatures for Java. [false]",
Scala_2_13 -> "Suppress generation of generic signatures for Java. [false]",
Scala_3_0 -> "Suppress generation of generic signatures for Java.",
Scala_3_1 -> "Suppress generation of generic signatures for Java.",
),
choices = Map.empty,
argType = ArgType.No,
scalaVersions = versions,
defaultValue = None,
),
SbtScalacOptionInfo(
flag = "-Xprint",
argType = ArgType.Multiple,
choices = Map.empty,
descriptions = Map(
Scala_2_11 -> "Print out program after <phases>",
Scala_2_12 -> "Print out program after <phases>",
Scala_2_13 -> "Print out program after <phases>",
Scala_3_0 -> "Print out program after",
Scala_3_1 -> "Print out program after",
),
scalaVersions = versions,
defaultValue = None,
),
SbtScalacOptionInfo(
flag = "-language",
argType = ArgType.Multiple,
choices = Map(
Scala_2_11 -> Set(
"experimental.macros",
"higherKinds",
"existentials",
"dynamics",
"reflectiveCalls",
"implicitConversions",
"postfixOps"
),
Scala_2_12 -> Set(
"experimental.macros",
"higherKinds",
"existentials",
"dynamics",
"reflectiveCalls",
"implicitConversions",
"postfixOps"
),
Scala_2_13 -> Set(
"experimental.macros",
"higherKinds",
"existentials",
"dynamics",
"reflectiveCalls",
"implicitConversions",
"postfixOps"
)
),
descriptions = Map(
Scala_2_11 -> "Enable or disable language features: `_' for all, `-language:help' to list",
Scala_2_12 -> "Enable or disable language features: `_' for all, `-language:help' to list choices.",
Scala_2_13 -> "Enable or disable language features",
Scala_3_0 -> "Enable one or more language features.",
Scala_3_1 -> "Enable one or more language features.",
),
scalaVersions = versions,
defaultValue = None
)
)
}
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/language/utils/SbtScalacOptionUtils.scala | Scala | apache-2.0 | 9,282 |
package com.temportalist.href.client.gui
import com.temportalist.href.common.inventory.ContainerTransmitter
import com.temportalist.href.common.tile.TETransmitter
import com.temportalist.origin.wrapper.client.gui.GuiContainerWrapper
import net.minecraft.entity.player.EntityPlayer
/**
*
*
* @author TheTemportalist
*/
class GuiTransmitter(p: EntityPlayer, te: TETransmitter) extends GuiContainerWrapper(
176, 166, new ContainerTransmitter(p, te)
) {
}
| TheTemportalist/href | src/main/scala/com/temportalist/href/client/gui/GuiTransmitter.scala | Scala | apache-2.0 | 460 |
package com.twitter.finagle
import com.twitter.util.Var
import java.net.InetSocketAddress
import org.junit.runner.RunWith
import org.scalatest.junit.{AssertionsForJUnit, JUnitRunner}
import org.scalatest.FunSuite
import scala.language.implicitConversions
@RunWith(classOf[JUnitRunner])
class ServersetNamerTest
extends FunSuite
with AssertionsForJUnit
{
def mkNamer(f: String => Var[Addr]): Namer = new com.twitter.serverset {
override protected[this] def resolve(spec: String) = f(spec)
}
def schemeOk(scheme: String): Unit = {
val addr = Addr.Bound(new InetSocketAddress(7127))
var named = 0
val namer = mkNamer { spec =>
assert(spec == s"$scheme!hosts!/twitter/service/role/env/job!endpoint")
named += 1
Var.value(addr)
}
assert(named == 0)
val path = Path.read("/hosts/twitter/service/role/env/job:endpoint")
namer.bind(NameTree.Leaf(path)).sample() match {
case NameTree.Leaf(bound: Name.Bound) =>
assert(named == 1)
assert(bound.addr.sample() == addr)
assert(bound.path == Path.empty)
assert(bound.id == Path.Utf8(
"$", "com.twitter.serverset",
"hosts", "twitter", "service", "role", "env", "job:endpoint"))
case _ => fail(s"invalid name: ${path.show}")
}
}
test("negative resolution") {
var named = 0
val namer = mkNamer { spec =>
assert(spec == s"zk2!hosts!/twitter/service/role/env/job:endpoint/extra")
named += 1
Var.value(Addr.Neg)
}
assert(named == 0)
val path = Path.read("/hosts/twitter/service/role/env/job:endpoint/extra")
assert(namer.bind(NameTree.Leaf(path)).sample() == NameTree.Neg)
assert(named == 1)
}
}
| liamstewart/finagle | finagle-serversets/src/test/scala/com/twitter/finagle/ServersetNamerTest.scala | Scala | apache-2.0 | 1,714 |
package core
import ch.qos.logback.classic.Logger
import com.martiansoftware.nailgun.NGContext
import core.config._
import core.execution.{ExecutionHelper, Task, TaskExecutor}
import core.execution.tasks._
import util.LazyNailLogging
object Fsbt extends LazyNailLogging {
def main(args: Array[String]): Unit = {
println("Not running as nailgun! Exiting")
}
def nailMain(context: NGContext): Unit = {
implicit val logger: Logger = getLogger(context)
implicit val ctx: NGContext = context
val args = context.getArgs.toList
if(args.length == 1 && args.head == "stop"){
context.getNGServer.shutdown(true)
}
val tasks: List[Task] = args.flatMap {
case "stop" => List(Stop())
case "compile" => List(Compile())
case "test" => List(Compile(), Test())
case "run" => List(Compile(), Run())
case "package" => List(Compile(), Test(), JarPackage())
case "clean" => List(Clean())
case unknown =>
context.out.println(s"Command not found: $unknown")
List()
}
// try{
val modules = ModuleBuilder.buildModules(context)
val executionConfig = ExecutionHelper.build(modules)
tasks.foreach(new TaskExecutor(modules, executionConfig, _).execute())
// }catch{
// case ex: Throwable => logger.error("Task failure", ex)
// }
}
}
| Humblehound/fsbt | server/src/main/scala/core/Fsbt.scala | Scala | mit | 1,348 |
/**
* Copyright (C) 2012-2013 Vadim Bartko ([email protected]).
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* See file LICENSE.txt for License information.
*/
package com.nevilon.nomad.storage.graph
/*
We do not track files with the same hash here!
*/
/*
class APIFacade extends SynchronizedDBService with FileStorage {
// def test() {
// val entities = findAllPdfFiles()
// entities.foreach(entity => {
// getIncoming(entity.url).foreach(u => println(u.location))
// getOutgoing(entity.url).foreach(t => println(t.location))
// println(entity)
// // println(entity.url + " " + entity.contentType + Transformers.vertex2Url(getUrl(entity.url).get).status)
// val path = FileSystems.getDefault().getPath("/tmp/pdfs/", System.currentTimeMillis().toString + ".pdf");
// Files.copy(getFileStream(entity.id), path)
// })
// }
/*
contentType
domain
*/
private implicit def DBObject2Entity(dbObject: DBObject) = {
new Entity(
dbObject.getAs[Long]("length").get,
dbObject.getAs[String]("filename").get,
new DateTime(dbObject.getAs[java.util.Date]("uploadDate").get.getTime),
dbObject.getAs[ObjectId]("_id").get.toString,
dbObject.getAs[String]("contentType").get,
dbObject.getAs[String]("md5").get,
dbObject.getAs[String]("urlId").get
)
}
def findAllPdfFiles(): List[Entity] = {
val entities = new ListBuffer[Entity]
val q = ("length" $gt 100000) ++ ("contentType" -> "application/pdf")
//val q = ("length" $gt 1)
val result = getGridFS().files(q)
result.foreach(obj => entities += obj)
entities.toList
}
def getFileStream(fileId: String): InputStream = {
getGridFS().findOne(new ObjectId(fileId)) match {
case None => throw new RuntimeException("wrong fileId")
case Some(gridFsFile) => gridFsFile.inputStream
}
}
private def getConnectedUrls(url: String, direction: Direction): List[Url] = {
getUrl(url) match {
case None => List[Url]()
case Some(v) => {
val incoming = v.getVertices(direction, "relation").map(v => {
Transformers.vertex2Url(v)
})
incoming.toList
}
}
}
def getIncoming(url: String): List[Url] = {
getConnectedUrls(url, Direction.IN)
}
def getOutgoing(url: String): List[Url] = {
getConnectedUrls(url, Direction.OUT)
}
}
class Entity(val size: Long, val url: String,
val timestamp: DateTime, val id: String,
val contentType: String, val md5: String, val urlId: String) extends ToStringImpl
*/
| hudvin/nomad | src/main/scala/com/nevilon/nomad/storage/graph/APIFacade.scala | Scala | gpl-2.0 | 2,839 |
package mesosphere.marathon.plugin
/**
* A [[https://mesosphere.github.io/marathon/docs/application-groups.html Marathon Application Group]]
*/
trait Group {
def id: PathId
def apps: Iterable[(PathId, RunSpec)]
def groupsById: Iterable[(PathId, Group)]
def dependencies: Iterable[PathId]
}
| timcharper/marathon | plugin-interface/src/main/scala/mesosphere/marathon/plugin/Group.scala | Scala | apache-2.0 | 303 |
package com.yahoo.scalops.dsl.actions
/*
* Copyright (c) 2012 Yahoo! Inc. All rights reserved. Licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying LICENSE file.
*/
import com.yahoo.scalops.dsl.Queryable
case class CountAction(input: Queryable[_]) extends Action[Long] {
override def toString = input.toString() + ".count()"
} | markusweimer/ScalOps | src/main/scala/com/yahoo/scalops/dsl/actions/CountAction.scala | Scala | apache-2.0 | 836 |
package org.automanlang.core.logging.tables
import java.util.UUID
import org.automanlang.core.scheduler.SchedulerState
import org.automanlang.core.scheduler.SchedulerState.SchedulerState
import scala.slick.driver.H2Driver.simple._
import java.util.Date
object DBTaskHistory {
val javaUtilDateMapper =
MappedColumnType.base[java.util.Date, java.sql.Timestamp] (
d => new java.sql.Timestamp(d.getTime),
d => new java.util.Date(d.getTime))
}
class DBTaskHistory(tag: Tag) extends Table[(Int, UUID, Date, SchedulerState)](tag, "DBTASK_HISTORY") {
implicit val javaUtilDateMapper = DBTaskHistory.javaUtilDateMapper
implicit val schedulerStateMapper = SchedulerState.mapper
def history_id = column[Int]("HISTORY_ID", O.PrimaryKey, O.AutoInc)
def task_id = column[UUID]("TASK_ID", O.NotNull)
def state_change_time = column[Date]("STATE_CHANGE_TIME", O.NotNull)
def scheduler_state = column[SchedulerState]("SCHEDULER_STATE", O.NotNull)
override def * = (history_id, task_id, state_change_time, scheduler_state)
} | dbarowy/AutoMan | libautoman/src/main/scala/org/automanlang/core/logging/tables/DBTaskHistory.scala | Scala | gpl-2.0 | 1,041 |
/* Title: Pure/Tools/bibtex.scala
Author: Makarius
BibTeX support.
*/
package isabelle
import scala.collection.mutable
import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.util.parsing.combinator.RegexParsers
object Bibtex
{
/** content **/
private val months = List(
"jan",
"feb",
"mar",
"apr",
"may",
"jun",
"jul",
"aug",
"sep",
"oct",
"nov",
"dec")
def is_month(s: String): Boolean = months.contains(s.toLowerCase)
private val commands = List("preamble", "string")
def is_command(s: String): Boolean = commands.contains(s.toLowerCase)
sealed case class Entry(
kind: String,
required: List[String],
optional_crossref: List[String],
optional_other: List[String])
{
def is_required(s: String): Boolean = required.contains(s.toLowerCase)
def is_optional(s: String): Boolean =
optional_crossref.contains(s.toLowerCase) || optional_other.contains(s.toLowerCase)
def fields: List[String] = required ::: optional_crossref ::: optional_other
def template: String =
"@" + kind + "{,\\n" + fields.map(x => " " + x + " = {},\\n").mkString + "}\\n"
}
val entries: List[Entry] =
List(
Entry("Article",
List("author", "title"),
List("journal", "year"),
List("volume", "number", "pages", "month", "note")),
Entry("InProceedings",
List("author", "title"),
List("booktitle", "year"),
List("editor", "volume", "number", "series", "pages", "month", "address",
"organization", "publisher", "note")),
Entry("InCollection",
List("author", "title", "booktitle"),
List("publisher", "year"),
List("editor", "volume", "number", "series", "type", "chapter", "pages",
"edition", "month", "address", "note")),
Entry("InBook",
List("author", "editor", "title", "chapter"),
List("publisher", "year"),
List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")),
Entry("Proceedings",
List("title", "year"),
List(),
List("booktitle", "editor", "volume", "number", "series", "address", "month",
"organization", "publisher", "note")),
Entry("Book",
List("author", "editor", "title"),
List("publisher", "year"),
List("volume", "number", "series", "address", "edition", "month", "note")),
Entry("Booklet",
List("title"),
List(),
List("author", "howpublished", "address", "month", "year", "note")),
Entry("PhdThesis",
List("author", "title", "school", "year"),
List(),
List("type", "address", "month", "note")),
Entry("MastersThesis",
List("author", "title", "school", "year"),
List(),
List("type", "address", "month", "note")),
Entry("TechReport",
List("author", "title", "institution", "year"),
List(),
List("type", "number", "address", "month", "note")),
Entry("Manual",
List("title"),
List(),
List("author", "organization", "address", "edition", "month", "year", "note")),
Entry("Unpublished",
List("author", "title", "note"),
List(),
List("month", "year")),
Entry("Misc",
List(),
List(),
List("author", "title", "howpublished", "month", "year", "note")))
def get_entry(kind: String): Option[Entry] =
entries.find(entry => entry.kind.toLowerCase == kind.toLowerCase)
def is_entry(kind: String): Boolean = get_entry(kind).isDefined
/** tokens and chunks **/
object Token
{
object Kind extends Enumeration
{
val COMMAND = Value("command")
val ENTRY = Value("entry")
val KEYWORD = Value("keyword")
val NAT = Value("natural number")
val STRING = Value("string")
val NAME = Value("name")
val IDENT = Value("identifier")
val SPACE = Value("white space")
val COMMENT = Value("ignored text")
val ERROR = Value("bad input")
}
}
sealed case class Token(kind: Token.Kind.Value, source: String)
{
def is_kind: Boolean =
kind == Token.Kind.COMMAND ||
kind == Token.Kind.ENTRY ||
kind == Token.Kind.IDENT
def is_name: Boolean =
kind == Token.Kind.NAME ||
kind == Token.Kind.IDENT
def is_ignored: Boolean =
kind == Token.Kind.SPACE ||
kind == Token.Kind.COMMENT
def is_malformed: Boolean = kind == Token.Kind.ERROR
}
case class Chunk(kind: String, tokens: List[Token])
{
val source = tokens.map(_.source).mkString
private val content: Option[List[Token]] =
tokens match {
case Token(Token.Kind.KEYWORD, "@") :: body if body.nonEmpty =>
(body.init.filterNot(_.is_ignored), body.last) match {
case (tok :: Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}"))
if tok.is_kind => Some(toks)
case (tok :: Token(Token.Kind.KEYWORD, "(") :: toks, Token(Token.Kind.KEYWORD, ")"))
if tok.is_kind => Some(toks)
case _ => None
}
case _ => None
}
def name: String =
content match {
case Some(tok :: _) if tok.is_name => tok.source
case _ => ""
}
def is_ignored: Boolean = kind == "" && tokens.forall(_.is_ignored)
def is_malformed: Boolean = kind == "" || tokens.exists(_.is_malformed)
def is_command: Boolean = Bibtex.is_command(kind) && name != "" && content.isDefined
def is_entry: Boolean = Bibtex.is_entry(kind) && name != "" && content.isDefined
}
/** parsing **/
// context of partial line-oriented scans
abstract class Line_Context
case object Ignored extends Line_Context
case object At extends Line_Context
case class Item_Start(kind: String) extends Line_Context
case class Item_Open(kind: String, end: String) extends Line_Context
case class Item(kind: String, end: String, delim: Delimited) extends Line_Context
case class Delimited(quoted: Boolean, depth: Int)
val Closed = Delimited(false, 0)
private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)
// See also http://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web
// module @<Scan for and process a \\.{.bib} command or database entry@>.
object Parsers extends RegexParsers
{
/* white space and comments */
override val whiteSpace = "".r
private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
private val spaces = rep(space)
/* ignored text */
private val ignored: Parser[Chunk] =
rep1("""(?i)([^@]+|@[ \t]*comment)""".r) ^^ {
case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) }
private def ignored_line: Parser[(Chunk, Line_Context)] =
ignored ^^ { case a => (a, Ignored) }
/* delimited string: outermost "..." or {...} and body with balanced {...} */
// see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces
private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
new Parser[(String, Delimited)]
{
require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0)
def apply(in: Input) =
{
val start = in.offset
val end = in.source.length
var i = start
var q = delim.quoted
var d = delim.depth
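// q: the outermost delimiter is a double quote; d: current "..."/{...} nesting depth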
var finished = false
while (!finished && i < end) {
val c = in.source.charAt(i)
if (c == '"' && d == 0) { i += 1; d = 1; q = true }
else if (c == '"' && d == 1 && q) {
i += 1; d = 0; q = false; finished = true
}
else if (c == '{') { i += 1; d += 1 }
else if (c == '}') {
if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true }
else {i = start; finished = true }
}
else if (d > 0) i += 1
else finished = true
}
if (i == start) Failure("bad input", in)
else {
val s = in.source.subSequence(start, i).toString
Success((s, Delimited(q, d)), in.drop(i - start))
}
}
}.named("delimited_depth")
private def delimited: Parser[Token] =
delimited_depth(Closed) ^?
{ case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }
private def recover_delimited: Parser[Token] =
"""["{][^@]*""".r ^^ token(Token.Kind.ERROR)
def delimited_line(ctxt: Item): Parser[(Chunk, Line_Context)] =
delimited_depth(ctxt.delim) ^^ { case (s, delim1) =>
(Chunk(ctxt.kind, List(Token(Token.Kind.STRING, s))), ctxt.copy(delim = delim1)) } |
recover_delimited ^^ { case a => (Chunk(ctxt.kind, List(a)), Ignored) }
/* other tokens */
private val at = "@" ^^ keyword
private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)
private val name = """[\x21-\x7f&&[^"#%'(),={}]]+""".r ^^ token(Token.Kind.NAME)
private val identifier =
"""[\\x21-\\x7f&&[^"#%'(),={}0-9]][\\x21-\\x7f&&[^"#%'(),={}]]*""".r
private val ident = identifier ^^ token(Token.Kind.IDENT)
val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))
/* body */
private val body =
delimited | (recover_delimited | other_token)
private def body_line(ctxt: Item) =
if (ctxt.delim.depth > 0)
delimited_line(ctxt)
else
delimited_line(ctxt) |
other_token ^^ { case a => (Chunk(ctxt.kind, List(a)), ctxt) } |
ctxt.end ^^ { case a => (Chunk(ctxt.kind, List(keyword(a))), Ignored) }
/* items: command or entry */
private val item_kind =
identifier ^^ { case a =>
val kind =
if (is_command(a)) Token.Kind.COMMAND
else if (is_entry(a)) Token.Kind.ENTRY
else Token.Kind.IDENT
Token(kind, a)
}
private val item_begin =
"{" ^^ { case a => ("}", keyword(a)) } |
"(" ^^ { case a => (")", keyword(a)) }
private def item_name(kind: String) =
kind.toLowerCase match {
case "preamble" => failure("")
case "string" => identifier ^^ token(Token.Kind.NAME)
case _ => name
}
private val item_start =
at ~ spaces ~ item_kind ~ spaces ^^
{ case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }
private val item: Parser[Chunk] =
(item_start ~ item_begin ~ spaces) into
{ case (kind, a) ~ ((end, b)) ~ c =>
opt(item_name(kind)) ~ rep(body) ~ opt(end ^^ keyword) ^^ {
case d ~ e ~ f => Chunk(kind, a ::: List(b) ::: c ::: d.toList ::: e ::: f.toList) } }
private val recover_item: Parser[Chunk] =
at ~ "[^@]*".r ^^ { case a ~ b => Chunk("", List(a, Token(Token.Kind.ERROR, b))) }
/* chunks */
val chunk: Parser[Chunk] = ignored | (item | recover_item)
def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
{
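// scan one line at a time, resuming from and returning the line context so items may span lines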
ctxt match {
case Ignored =>
ignored_line |
at ^^ { case a => (Chunk("", List(a)), At) }
case At =>
space ^^ { case a => (Chunk("", List(a)), ctxt) } |
item_kind ^^ { case a => (Chunk(a.source, List(a)), Item_Start(a.source)) } |
recover_item ^^ { case a => (a, Ignored) } |
ignored_line
case Item_Start(kind) =>
space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
item_begin ^^ { case (end, a) => (Chunk(kind, List(a)), Item_Open(kind, end)) } |
recover_item ^^ { case a => (a, Ignored) } |
ignored_line
case Item_Open(kind, end) =>
space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
item_name(kind) ^^ { case a => (Chunk(kind, List(a)), Item(kind, end, Closed)) } |
body_line(Item(kind, end, Closed)) |
ignored_line
case item_ctxt: Item =>
body_line(item_ctxt) |
ignored_line
case _ => failure("")
}
}
}
/* parse */
def parse(input: CharSequence): List[Chunk] =
{
val in: Reader[Char] = new CharSequenceReader(input)
Parsers.parseAll(Parsers.rep(Parsers.chunk), in) match {
case Parsers.Success(result, _) => result
case _ => error("Unexpected failure to parse input:\\n" + input.toString)
}
}
def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) =
{
var in: Reader[Char] = new CharSequenceReader(input)
val chunks = new mutable.ListBuffer[Chunk]
var ctxt = context
while (!in.atEnd) {
Parsers.parse(Parsers.chunk_line(ctxt), in) match {
case Parsers.Success((x, c), rest) => chunks += x; ctxt = c; in = rest
case Parsers.NoSuccess(_, rest) =>
error("Unepected failure to parse input:\\n" + rest.source.toString)
}
}
(chunks.toList, ctxt)
}
}
| wneuper/libisabelle | pide/2015/src/main/scala/Tools/bibtex.scala | Scala | mit | 13,069 |
package ore.models.project
import scala.collection.immutable
import enumeratum.values._
sealed abstract class ReviewState(val value: Int, val apiName: String) extends IntEnumEntry {
def isChecked: Boolean = this == ReviewState.Reviewed || this == ReviewState.PartiallyReviewed
}
object ReviewState extends IntEnum[ReviewState] {
case object Unreviewed extends ReviewState(0, "unreviewed")
case object Reviewed extends ReviewState(1, "reviewed")
case object Backlog extends ReviewState(2, "backlog")
case object PartiallyReviewed extends ReviewState(3, "partially_reviewed")
val values: immutable.IndexedSeq[ReviewState] = findValues
}
| SpongePowered/Ore | models/src/main/scala/ore/models/project/ReviewState.scala | Scala | mit | 678 |
package net.usersource.twitpipe
import org.apache.http.client.methods.HttpPost
import org.apache.http.params.HttpConnectionParams
import org.apache.http.impl.client.DefaultHttpClient
import java.io.{InputStreamReader, BufferedReader}
class TwitterEndpoint extends Endpoint with OAuth {
def uri = "http://stream.twitter.com/1/statuses/sample.json"
def connect:Either[Error,BufferedReader] = {
try {
val request = new HttpPost(uri)
HttpConnectionParams.setConnectionTimeout(request.getParams,connectionTimeout)
HttpConnectionParams.setSoTimeout(request.getParams,soTimeout)
consumer.sign(request);
val httpClient = new DefaultHttpClient();
val response = httpClient.execute(request);
if( response.getStatusLine.getStatusCode == 200 ) {
Right(new BufferedReader(new InputStreamReader(response.getEntity.getContent)))
}
else {
Left(new Error( "HTTP: " + response.getStatusLine))
}
}
catch {
case e: Exception => Left(new Error( "Exception: " + e.getMessage ))
}
}
}
| glenford/TwitterPipeline | src/main/scala/net/usersource/twitpipe/TwitterEndpoint.scala | Scala | apache-2.0 | 1,073 |
package com.yannick_cw.elastic_indexer4s.elasticsearch.index_ops
import cats.data.EitherT
import cats.implicits._
import com.yannick_cw.elastic_indexer4s.Index_results.{IndexError, StageSucceeded}
import com.yannick_cw.elastic_indexer4s.specs.AsyncSpec
import scala.concurrent.Future
class IndexDeletionSpec extends AsyncSpec {
val newIndex = IndexWithInfo("newIndex", List("alias"), 99)
"The IndexDeletion" should {
"never delete the new index" in {
val opsClient = testEsOpsClient(newIndex)
val deleter = IndexDeletion(opsClient)
deleter.deleteOldest("new", "newIndex", 0, false).map { deletionResult =>
deletionResult.right.value shouldBe a[StageSucceeded]
opsClient.deletedIndices shouldBe empty
}
}
"never delete an index with alias if protected" in {
val protectedIndices = (1 to 10).map(i => IndexWithInfo(s"index$i", List(s"alias$i"), i))
val opsClient = testEsOpsClient(newIndex +: protectedIndices: _*)
val deleter = IndexDeletion(opsClient)
deleter.deleteOldest("inde", "index0", 0, true).map { deletionResult =>
deletionResult.right.value shouldBe a[StageSucceeded]
opsClient.deletedIndices shouldBe empty
}
}
"only delete indices with the same prefix" in {
val indicesWithSamePrefix = (1 to 10).map(i => IndexWithInfo(s"index$i", List(s"alias"), i))
val differentIndices = (1 to 10).map(i => IndexWithInfo(s"some$i", List(s"alias"), i))
val opsClient = testEsOpsClient(newIndex +: (indicesWithSamePrefix ++ differentIndices): _*)
val deleter = IndexDeletion(opsClient)
deleter.deleteOldest("inde", "index0", 0, false).map { deletionResult =>
deletionResult.right.value shouldBe a[StageSucceeded]
opsClient.deletedIndices should contain theSameElementsAs indicesWithSamePrefix.map(_.index)
}
}
"keep at least defined amount of indices, even if there are newer indices with different prefix" in {
val indicesWithSamePrefix = (1 to 10).map(i => IndexWithInfo(s"index$i", List(s"alias"), i))
val differentIndices = (11 to 20).map(i => IndexWithInfo(s"some$i", List(s"alias"), i))
val opsClient = testEsOpsClient(newIndex +: (indicesWithSamePrefix ++ differentIndices): _*)
val deleter = IndexDeletion(opsClient)
deleter.deleteOldest("inde", "index0", 3, false).map { deletionResult =>
deletionResult.right.value shouldBe a[StageSucceeded]
opsClient.deletedIndices should have length 7
}
}
"delete the oldest indices first if more indices than defined to keep" in {
val indices = scala.util.Random.shuffle((1 to 10).map(i => IndexWithInfo(s"index$i", List.empty, i)))
val opsClient = testEsOpsClient(newIndex +: indices: _*)
val deleter = IndexDeletion(opsClient)
deleter.deleteOldest("inde", "newIndex", 5, false).map { deletionResult =>
deletionResult.right.value shouldBe a[StageSucceeded]
opsClient.deletedIndices should contain theSameElementsAs indices.sortBy(_.creationTime).take(5).map(_.index)
}
}
}
private def testEsOpsClient(oldIndicesWithAlias: IndexWithInfo*) =
new EsOpsClientApi {
val deletedIndices = scala.collection.mutable.Buffer.empty[String]
def removeAliasFromIndex(index: String, alias: String): OpsResult[Boolean] = ???
def addAliasToIndex(index: String, alias: String): OpsResult[Boolean] = ???
def sizeFor(index: String): OpsResult[Long] = ???
def delete(index: String): OpsResult[Boolean] = {
deletedIndices += index
EitherT.pure[Future, IndexError](true)
}
def allIndicesWithAliasInfo: OpsResult[List[IndexWithInfo]] =
EitherT.pure[Future, IndexError](oldIndicesWithAlias.toList)
}
}
| yannick-cw/elastic-indexer4s | src/test/scala/com/yannick_cw/elastic_indexer4s/elasticsearch/index_ops/IndexDeletionSpec.scala | Scala | mit | 3,915 |
package play.api.templates
case class Html(string: String) {
override def toString = string
}
| mslinn/PFView | src/test/scala/play/api/templates/Mocks.scala | Scala | mit | 104 |
package com.github.alexanderscott.twitterstream.auth
import javax.crypto
import java.nio.charset.Charset
import spray.http.{HttpEntity, MediaTypes, ContentType, HttpRequest}
import spray.http.HttpHeaders.RawHeader
import org.parboiled.common.Base64
import scala.collection.immutable.TreeMap
import java.net.URLEncoder
object OAuth {
case class Consumer(key: String, secret: String)
case class Token(value: String, secret: String)
def oAuthAuthorizer(consumer: Consumer, token: Token): HttpRequest => HttpRequest = {
// construct the key and cryptographic entity
val SHA1 = "HmacSHA1"
val keyString = percentEncode(consumer.secret :: token.secret :: Nil)
val key = new crypto.spec.SecretKeySpec(bytes(keyString), SHA1)
val mac = crypto.Mac.getInstance(SHA1)
{ httpRequest: HttpRequest =>
val timestamp = (System.currentTimeMillis / 1000).toString
// nonce is unique enough for our purposes here
val nonce = System.nanoTime.toString
// pick out x-www-form-urlencoded body
val (requestParams, newEntity) = httpRequest.entity match {
case HttpEntity.NonEmpty(ContentType(MediaTypes.`application/x-www-form-urlencoded`, _), data) =>
val params = data.asString.split("&")
val pairs = params.map { param =>
val p = param.split("=")
p(0) -> percentEncode(p(1))
}
(pairs.toMap, HttpEntity(ContentType(MediaTypes.`application/x-www-form-urlencoded`), "%s=%s" format (pairs(0)._1, pairs(0)._2)))
case e => (Map(), e)
}
// prepare the OAuth parameters
val oauthParams = Map(
"oauth_consumer_key" -> consumer.key,
"oauth_signature_method" -> "HMAC-SHA1",
"oauth_timestamp" -> timestamp,
"oauth_nonce" -> nonce,
"oauth_token" -> token.value,
"oauth_version" -> "1.0"
)
// construct parts of the signature base string
val encodedOrderedParams = (TreeMap[String, String]() ++ oauthParams ++ requestParams) map { case (k, v) => k + "=" + v } mkString "&"
val url = httpRequest.uri.toString()
// construct the signature base string
val signatureBaseString = percentEncode(httpRequest.method.toString() :: url :: encodedOrderedParams :: Nil)
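// illustrative shape only: "<METHOD>&<percent-encoded URL>&<percent-encoded sorted params>", e.g. "POST&https%3A%2F%2F...&oauth_consumer_key%3D..."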
mac.init(key)
val sig = Base64.rfc2045().encodeToString(mac.doFinal(bytes(signatureBaseString)), false)
mac.reset()
val oauth = TreeMap[String, String]() ++ (oauthParams + ("oauth_signature" -> percentEncode(sig))) map { case (k, v) => "%s=\"%s\"" format (k, v) } mkString ", "
// return the signed request
httpRequest.withHeaders(List(RawHeader("Authorization", "OAuth " + oauth))).withEntity(newEntity)
}
}
private def percentEncode(str: String): String = URLEncoder.encode(str, "UTF-8") replace ("+", "%20") replace ("%7E", "~")
private def percentEncode(s: Seq[String]): String = s map percentEncode mkString "&"
private def bytes(str: String) = str.getBytes(Charset.forName("UTF-8"))
}
| alexanderscott/akka-twitter-streaming-cluster | src/main/scala/com/crunchdevelopment/twitterstreaming/auth/OAuth.scala | Scala | apache-2.0 | 3,016 |
trait Base {
def getID: String
}
abstract class X extends Base {
override def getID: String = ""
private[this] def bar = {
def foo = /*start*/true/*end*/
""
}
}
()
//expected: <none> | triggerNZ/intellij-scala | testdata/typeInference/bugs4/SCL3178.scala | Scala | apache-2.0 | 200 |
import edu.uta.diql._
import org.apache.spark._
import org.apache.spark.rdd._
import org.apache.log4j._
import scala.util.Random
object KMeans {
def main ( args: Array[String] ) {
val repeats = args(0).toInt
val length = args(1).toLong
val num_steps = 1
val conf = new SparkConf().setAppName("KMeans")
val sc = new SparkContext(conf)
conf.set("spark.logConf","false")
conf.set("spark.eventLog.enabled","false")
LogManager.getRootLogger().setLevel(Level.WARN)
val rand = new Random()
def getd (): Double = {
val v = rand.nextDouble()*20.0D
if (v.toInt % 2 == 0) getd() else v
}
val points = sc.parallelize(1L to length/100)
.flatMap{ i => (1 to 100).map{ i => (getd(),getd()) } }
.cache()
val size = sizeof((1.0D,1.0D))
println("*** %d %.2f GB".format(length,length.toDouble*size/(1024.0*1024.0*1024.0)))
var initial_centroids
= (for { i <- 0 to 9; j <- 0 to 9 }
yield ((i*2+1.2).toDouble,(j*2+1.2).toDouble)).toArray
var centroids = initial_centroids
def distance ( x: (Double,Double), y: (Double,Double) ): Double
= Math.sqrt((x._1-y._1)*(x._1-y._1)+(x._2-y._2)*(x._2-y._2))
def test () {
case class ArgMin ( index: Long, distance: Double ) {
def ^ ( x: ArgMin ): ArgMin
= if (distance <= x.distance) this else x
}
case class Avg ( sum: (Double,Double), count: Long ) {
def ^^ ( x: Avg ): Avg
= Avg((sum._1+x.sum._1,sum._2+x.sum._2),count+x.count)
def value(): (Double,Double)
= (sum._1/count,sum._2/count)
}
var t: Long = System.currentTimeMillis()
try {
for ( i <- 1 to num_steps ) {
val cs = sc.broadcast(centroids)
centroids = points.map { p => (cs.value.minBy(distance(p,_)), Avg(p,1)) }
.reduceByKey(_^^_)
.map(_._2.value())
.collect()
}
println(centroids.length)
println("**** KMeansSpark run time: "+(System.currentTimeMillis()-t)/1000.0+" secs")
} catch { case x: Throwable => println(x) }
var P = points.zipWithIndex.map{ case (p,i) => (i.toLong,p) }
var C = initial_centroids.zipWithIndex.map{ case (p,i) => (i.toLong,p) }
val K = C.length
val N = P.count()
var avg = (1 to K).map{ i => (i.toLong-1,Avg((0.0,0.0),0)) }.toArray
t = System.currentTimeMillis()
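// the same k-means step in DIQL's imperative syntax: assign each point to its nearest centroid, accumulate per-cluster sums/counts, then update C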
v(sc,"""
var closest: vector[ArgMin] = vector();
var steps: Int = 0;
while (steps < num_steps) {
steps += 1;
for i = 0, N-1 do {
closest[i] := ArgMin(0,10000.0);
for j = 0, K-1 do
closest[i] := closest[i] ^ ArgMin(j,distance(P[i],C[j]));
avg[closest[i].index] := avg[closest[i].index] ^^ Avg(P[i],1);
};
for i = 0, K-1 do
C[i] := avg[i].value();
};
""")
println(C.length)
println("**** KMeansDiablo run time: "+(System.currentTimeMillis()-t)/1000.0+" secs")
}
for ( i <- 1 to repeats )
test()
sc.stop()
}
}
| fegaras/DIQL | benchmarks/diablo/kmeans.scala | Scala | apache-2.0 | 3,213 |
package net.spals.appbuilder.mapstore.mongodb
import java.util.Optional
import io.opentracing.mock.{MockSpan, MockTracer}
import net.spals.appbuilder.mapstore.core.model.MapQueryOptions.defaultOptions
import net.spals.appbuilder.mapstore.core.model.MultiValueMapRangeKey.in
import net.spals.appbuilder.mapstore.core.model.SingleValueMapRangeKey.{equalTo => range_equalTo, greaterThan => range_greaterThan, greaterThanOrEqualTo => range_greaterThanOrEqualTo, lessThan => range_lessThan, lessThanOrEqualTo => range_lessThanOrEqualTo}
import net.spals.appbuilder.mapstore.core.model.TwoValueMapRangeKey.between
import net.spals.appbuilder.mapstore.core.model.ZeroValueMapRangeKey.all
import net.spals.appbuilder.mapstore.core.model.{MapStoreKey, MapStoreTableKey}
import net.spals.appbuilder.mapstore.mongodb.MongoDBSpanMatcher.mongoDBSpan
import org.bson.Document
import org.hamcrest.MatcherAssert.assertThat
import org.hamcrest.Matchers._
import org.hamcrest.{Description, Matcher, TypeSafeMatcher}
import org.slf4j.LoggerFactory
import org.testng.annotations._
import scala.collection.JavaConverters._
/**
* Integration tests for [[MongoDBMapStorePlugin]].
*
* @author tkral
*/
class MongoDBMapStorePluginIT {
private val LOGGER = LoggerFactory.getLogger(classOf[MongoDBMapStorePluginIT])
private val mongoDBTracer = new MockTracer()
private lazy val mongoClient = {
val mongoClientProvider = new MongoClientProvider(mongoDBTracer)
mongoClientProvider.host = System.getenv("MONGODB_IP")
mongoClientProvider.port = System.getenv("MONGODB_PORT").toInt
LOGGER.info(s"Connecting to mongoDB instance at ${mongoClientProvider.host}:${mongoClientProvider.port}")
mongoClientProvider.get()
}
private val applicationName = "MongoDBMapStorePluginIT"
private lazy val mongoDatabase = {
val mongoDatabaseProvider = new MongoDatabaseProvider(applicationName, mongoClient)
mongoDatabaseProvider.get()
}
private lazy val mapStorePlugin = new MongoDBMapStorePlugin(mongoClient, mongoDatabase)
private val hashTableName = "hashTable"
private val hashTableKey = new MapStoreTableKey.Builder()
.setHash("myHashField", classOf[String])
.build
private val rangeTableName = "rangeTable"
private val rangeTableKey = new MapStoreTableKey.Builder()
.setHash("myHashField", classOf[String])
.setRange("myRangeField", classOf[String])
.build()
@BeforeClass def createTables() {
mapStorePlugin.createTable(hashTableName, hashTableKey)
mapStorePlugin.createTable(rangeTableName, rangeTableKey)
}
@BeforeMethod def resetTracer() {
mongoDBTracer.reset()
}
@AfterClass(alwaysRun = true) def tearDownClass() {
mapStorePlugin.dropTable(hashTableName)
mapStorePlugin.dropTable(rangeTableName)
mapStorePlugin.close()
}
@Test def testCreateTableIdempotent() {
assertThat(mapStorePlugin.createTable(hashTableName, hashTableKey), is(true))
}
@DataProvider def emptyGetProvider(): Array[Array[AnyRef]] = {
Array(
// Case: Hash-only key
Array(hashTableName,
new MapStoreKey.Builder().setHash("myHashField", "deadbeef").build),
// Case: Hash and range key
Array(rangeTableName,
new MapStoreKey.Builder().setHash("myHashField", "deadbeef")
.setRange("myRangeField", range_equalTo[String]("deadbeef")).build)
)
}
@Test(
dataProvider = "emptyGetProvider",
groups = Array("MongoDBMapStorePluginIT.empty")
)
def testEmptyGetItem(
tableName: String,
storeKey: MapStoreKey
) {
assertThat(mapStorePlugin.getItem(tableName, storeKey), is(Optional.empty[java.util.Map[String, AnyRef]]))
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("find")))
}
@Test(
dataProvider = "emptyGetProvider",
groups = Array("MongoDBMapStorePluginIT.empty")
)
def testEmptyGetItems(
tableName: String,
storeKey: MapStoreKey
) {
assertThat(mapStorePlugin.getItems(tableName, storeKey, defaultOptions()), empty[java.util.Map[String, AnyRef]])
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("find")))
}
@DataProvider def putItemProvider(): Array[Array[AnyRef]] = {
Array(
Array(hashTableName,
new MapStoreKey.Builder().setHash("myHashField", "myHashValue").build,
Map("key" -> "value"),
new Document(Map("myHashField" -> "myHashValue", "key" -> "value").toMap[String, AnyRef].asJava)),
Array(rangeTableName,
new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue1")).build,
Map("key" -> "value"),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue1", "key" -> "value").toMap[String, AnyRef].asJava)),
// Inserted for getItems tests below
Array(rangeTableName,
new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue2")).build,
Map("key" -> "value"),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue2", "key" -> "value").toMap[String, AnyRef].asJava)),
Array(rangeTableName,
new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue3")).build,
Map("key" -> "value"),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue3", "key" -> "value").toMap[String, AnyRef].asJava)),
Array(rangeTableName,
new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue4")).build,
Map("key" -> "value"),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue4", "key" -> "value").toMap[String, AnyRef].asJava))
)
}
@Test(
dataProvider = "putItemProvider",
groups = Array("MongoDBMapStorePluginIT.put"),
dependsOnGroups = Array("MongoDBMapStorePluginIT.empty")
)
def testPutItem(
tableName: String,
storeKey: MapStoreKey,
payload: Map[String, AnyRef],
expectedResult: Document
) {
// The asserts here are technically backwards, but we have to
assertThat(mapStorePlugin.putItem(tableName, storeKey, payload.asJava).asInstanceOf[Document],
is(expectedResult))
assertThat(mapStorePlugin.getItem(tableName, storeKey).asInstanceOf[Optional[Document]],
is(Optional.of(expectedResult)))
assertThat(mongoDBTracer.finishedSpans(),
contains[MockSpan](mongoDBSpan("insert"), mongoDBSpan("find")))
}
@DataProvider def updateItemProvider(): Array[Array[AnyRef]] = {
Array(
Array(Map("numberKey" -> Long.box(1L)),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue1",
"key" -> "value", "numberKey" -> java.lang.Long.valueOf(1L)).toMap[String, AnyRef].asJava)),
Array(Map("numberKey" -> ""),
new Document(Map("myHashField" -> "myHashValue", "myRangeField" -> "myRangeValue1",
"key" -> "value").toMap[String, AnyRef].asJava))
)
}
@Test(
dataProvider = "updateItemProvider",
groups = Array("MongoDBMapStorePluginIT.update"),
dependsOnGroups = Array("MongoDBMapStorePluginIT.put")
)
def testUpdateItem(
payload: Map[String, AnyRef],
expectedResult: Document
) {
val storeKey = new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue1")).build
assertThat(mapStorePlugin.updateItem(rangeTableName, storeKey, payload.asJava).asInstanceOf[Document],
is(expectedResult))
assertThat(mapStorePlugin.getItem(rangeTableName, storeKey).asInstanceOf[Optional[Document]],
is(Optional.of(expectedResult)))
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("findandmodify"),
mongoDBSpan("find")))
}
@Test(
groups = Array("MongoDBMapStorePluginIT.get"),
dependsOnGroups = Array("MongoDBMapStorePluginIT.put", "MongoDBMapStorePluginIT.update")
)
def testGetAllItems() {
assertThat(mapStorePlugin.getAllItems(rangeTableName).asInstanceOf[java.util.List[Document]],
// getAllItems isn't ordered on range key
containsInAnyOrder[Document](result(1), result(2), result(3), result(4)))
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("find")))
}
@DataProvider def getItemsProvider(): Array[Array[AnyRef]] = {
Array(
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", all()).build,
List(result(1), result(2), result(3), result(4))),
// Case: Between different values
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", between[String]("myRangeValue2", "myRangeValue4")).build,
List(result(2), result(3), result(4))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", between[String]("myRangeValue2", "myRangeValue2")).build,
List(result(2))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue1")).build,
List(result(1))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_greaterThan[String]("myRangeValue2")).build,
List(result(3), result(4))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_greaterThanOrEqualTo[String]("myRangeValue2")).build,
List(result(2), result(3), result(4))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", in[String]("myRangeValue2", "myRangeValue3")).build,
List(result(2), result(3))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_lessThan[String]("myRangeValue3")).build,
List(result(1), result(2))),
Array(new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_lessThanOrEqualTo[String]("myRangeValue3")).build,
List(result(1), result(2), result(3)))
)
}
@Test(
dataProvider = "getItemsProvider",
groups = Array("MongoDBMapStorePluginIT.get"),
dependsOnGroups = Array("MongoDBMapStorePluginIT.put", "MongoDBMapStorePluginIT.update")
)
def testGetItems(
storeKey: MapStoreKey,
expectedResults: List[Document]
) {
assertThat(mapStorePlugin.getItems(rangeTableName, storeKey, defaultOptions()).asInstanceOf[java.util.List[Document]],
contains[Document](expectedResults: _*))
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("find")))
}
@Test(
groups = Array("MongoDBMapStorePluginIT.delete"),
dependsOnGroups = Array("MongoDBMapStorePluginIT.get")
)
def testDeleteItem() {
val storeKey = new MapStoreKey.Builder().setHash("myHashField", "myHashValue")
.setRange("myRangeField", range_equalTo[String]("myRangeValue4")).build
mapStorePlugin.deleteItem(rangeTableName, storeKey)
assertThat(mapStorePlugin.getAllItems(rangeTableName).asInstanceOf[java.util.List[Document]],
// getAllItems isn't ordered on range key
containsInAnyOrder[Document](result(1), result(2), result(3)))
assertThat(mongoDBTracer.finishedSpans(), contains[MockSpan](mongoDBSpan("delete"), mongoDBSpan("find")))
}
private def result(i: Int): Document = {
new Document(Map("myHashField" -> "myHashValue",
"myRangeField" -> s"myRangeValue$i", "key" -> "value").toMap[String, AnyRef].asJava)
}
}
| spals/appbuilder | mapstore-mongodb-test/src/test/scala/net/spals/appbuilder/mapstore/mongodb/MongoDBMapStorePluginIT.scala | Scala | bsd-3-clause | 11,893 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.web.data
import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.DataStoreFinder
import org.locationtech.geomesa.web.core.GeoMesaScalatraServlet
import org.scalatra.{BadRequest, InternalServerError, Ok}
import scala.collection.JavaConversions._
class DataEndpoint extends GeoMesaScalatraServlet with LazyLogging {
override val root: String = "data"
delete("/:catalog/:feature") {
delete()
}
post("/:catalog/:feature/delete") {
delete()
}
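  // For example, an HTTP DELETE to /data/myCatalog/myFeature (or a POST to
  // /data/myCatalog/myFeature/delete) drops the "myFeature" schema; the catalog segment and
  // connection details are presumably resolved via datastoreParams in the base servlet.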
def delete(): Unit = {
val fn = params("feature")
try {
val ds = DataStoreFinder.getDataStore(datastoreParams)
if (ds == null) {
BadRequest()
} else {
ds.removeSchema(fn)
ds.dispose()
Ok()
}
} catch {
case e: Exception =>
logger.error(s"Error deleting feature $fn", e)
InternalServerError()
}
}
}
| ronq/geomesa | geomesa-web/geomesa-web-data/src/main/scala/org/locationtech/geomesa/web/data/DataEndpoint.scala | Scala | apache-2.0 | 1,370 |
// UseALibrary.scala
import com.yoururl.libraryname._
new X
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-examples/examples/17_Summary2/UseALibrary.scala | Scala | apache-2.0 | 60 |
package com.tysonjh
import org.scalatest.FunSpec
/**
* Created by tysonjh on 2/7/2014.
*/
class TuplerSpec extends FunSpec {
describe("a Tupler macro should") {
it("make a tuple from a case class") {
case class Cat(name: String, colour: String, age: Int)
implicit val writes: Writes[Cat] = Tupler.tupleWrites[Cat]
val floydCat = Cat("Floyd", "grey", 6)
val result = Tupler.toTupleSeq(floydCat)
assert(result === Seq(("name", "Floyd"), ("colour", "grey"), ("age", "6")))
}
}
}
| tysonjh/tupler | src/test/scala/com/tysonjh/TuplerSpec.scala | Scala | apache-2.0 | 524 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.strings
/**
* Rewritten from http://tartarus.org/martin/PorterStemmer/scala.txt
* for thread-safety and style (but definitely not too pretty yet).
* @author Brian Martin
*/
object PorterStemmer {
val vowels = "aeiou"
val step1aVals = List(("sses", "ss"), ("ies","i"), ("ss","ss"), ("s", ""))
val step1bVals = List(("at", "ate"), ("bl","ble"), ("iz","ize"))
val step2Vals = List(("ational", "ate"),("tional","tion"),("enci","ence"),("anci","ance"),("izer","ize"),("bli","ble"),("alli", "al"), ("entli","ent"),("eli","e"),("ousli","ous"),("ization","ize"),("ation","ate"),("ator","ate"),("alism","al"), ("iveness","ive"),("fulness","ful"),("ousness", "ous"),("aliti", "al"),("iviti","ive"),("biliti", "ble"),("logi", "log"))
val step3Vals = List(("icate", "ic"),("ative",""),("alize","al"),("iciti","ic"),("ical","ic"),("ful",""),("ness",""))
val step4aVals = List(("al",""),("ance",""),("ence",""),("er",""),("ic",""),("able",""),("ible",""),("ant",""),("ement",""), ("ment",""),("ent",""))
val step4bVals = List(("ou",""),("ism",""),("ate",""),("iti",""),("ous",""),("ive",""),("ize",""))
def applySteps(_b: String): String = {
if (_b.size <= 2) return _b
var b = _b
def isConsonant(i: Int): Boolean = {
if (b(i) == 'y') {
if (i == 0) true
else !isConsonant(i-1)
}
else !vowels.contains(b(i))
}
/* m() measures the number of consonant sequences between 0 and j. if c is
a consonant sequence and v a vowel sequence, and <..> indicates arbitrary
presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
....
*/
def calcM(s:String): Int = {
if (b.length == 0) return 0
var count = 0
(1 until s.length).foldLeft(isConsonant(0)) {
case (lastIsC, c) =>
val isC = isConsonant(c)
if (isC && !lastIsC) count += 1
isC
}
count
}
/* removing the suffix string, s, does a vowel exist?' */
def vowelInStem(s: String): Boolean = (0 until b.length - s.length).exists(!isConsonant(_))
/* doublec(j) is true <=> j,(j-1) contain a double consonant. */
def doublec(): Boolean = {
var l = b.length - 1
l >= 1 && b(l) == b(l-1) && isConsonant(l)
}
/* cvc(i) is true <=> i-2,i-1,i has the form consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
restore an e at the end of a short word. e.g.
cav(e), lov(e), hop(e), crim(e), but
snow, box, tray.
*/
def cvc(s:String): Boolean = {
val i = b.length - 1 - s.length
if (i < 2 || !isConsonant(i) || isConsonant(i-1) || !isConsonant(i-2)) false
else if ("wxy".contains(b(i))) false
else true
}
def replacer(orig: String, replace:String, checker: Int => Boolean): Boolean = {
if (b.endsWith(orig)) {
var n = b.dropRight(orig.length)
if (checker(calcM(n)))
b = n + replace
true
}
else false
}
def processSubList( l:List[(String, String)], checker: Int=>Boolean ): Boolean =
l.exists(v => replacer(v._1, v._2, checker))
// step 1a
processSubList(step1aVals, _ >= 0)
// step 1b
if (!replacer("eed", "ee", _ > 0) &&
((vowelInStem("ed") && replacer("ed", "", _>=0) ) || ( vowelInStem("ing") && replacer( "ing", "", _>=0) ) ) &&
(! processSubList(step1bVals, _>=0))) {
if ( doublec() && !"lsz".contains( b.last ) )
b = b.substring( 0, b.length - 1 )
else if (calcM(b) == 1 && cvc(""))
b = b + "e"
}
// step 1c
vowelInStem("y") && replacer("y", "i", _ >= 0)
// step 2
processSubList(step2Vals, _>0 )
// step 3
processSubList(step3Vals, _>0)
// step 4
var res = processSubList(step4aVals, _>1)
if (!res && b.length > 4 && (b(b.length-4) == 's' || b(b.length-4) == 't'))
res = replacer("ion", "", _>1)
if (!res) processSubList(step4bVals, _>1)
// step 5a
replacer("e", "", _>1)
if ( !cvc("e") )
replacer("e", "", _==1)
// step 5b
if (calcM(b) > 1 && doublec() && b.last == 'l') b = b.dropRight(1)
b
}
def apply(s:String): String = applySteps(s)
def main(args: Array[String]): Unit = {
def getOWPL(f: String) = io.Source.fromFile(f).getLines().toSeq.map(_.trim)
if (args.length != 2)
      println("Expected arguments are an OWPL file of unstemmed and an OWPL file of properly stemmed words to check against.\n" +
"These are available at http://tartarus.org/martin/PorterStemmer/")
val unstemmed = getOWPL(args(0))
val trueStemmed = getOWPL(args(1))
println("unstemmed.size: " + unstemmed.size)
println("trueStemmed.size: " + trueStemmed.size)
val stemmed = unstemmed.map(apply)
println("stemmed.size: " + stemmed.size)
stemmed.zip(trueStemmed).filter(s => s._1 != s._2).foreach(println(_))
stemmed.zip(trueStemmed).foreach(s => assert(s._1 == s._2, s._1 + " " + s._2))
stemmed.zip(trueStemmed).take(20).foreach(s => println("sample: " + s._1 + " " + s._2))
}
}
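// Illustrative usage sketch, not part of the original factorie source: the object name and
// sample words are assumptions; it simply feeds each word through PorterStemmer.apply above.
object PorterStemmerExample {
  def main(args: Array[String]): Unit = {
    val words = Seq("caresses", "ponies", "relational", "hopping", "sky")
    words.foreach(w => println(w + " -> " + PorterStemmer(w)))
  }
}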
| Craigacp/factorie | src/main/scala/cc/factorie/app/strings/PorterStemmer.scala | Scala | apache-2.0 | 5,964 |
package com.mesosphere.cosmos.model
case class ListRequest(
packageName: Option[String] = None,
appId: Option[AppId] = None
)
| movicha/cosmos | cosmos-model/src/main/scala/com/mesosphere/cosmos/model/ListRequest.scala | Scala | apache-2.0 | 131 |
package com.themillhousegroup.edn.test
import org.specs2.mutable.Specification
trait StreamChecking {
this: Specification =>
def valueStreamMustHave[T](stream: Stream[T], items: T*) = {
val s = stream.toSeq
s must haveSize(items.size)
s must containTheSameElementsAs(items)
}
def keyStreamMustHave(stream: Stream[(String, AnyRef)], items: String*) = {
val s = stream.toSeq
s must haveSize(items.size)
s.map { case (k, v) => k } must containTheSameElementsAs(items)
}
def keyValueStreamMustHave(stream: Stream[(String, AnyRef)], items: (String, _)*) = {
val s = stream.toSeq
s must haveSize(items.size)
s must containTheSameElementsAs(items)
}
}
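// Illustrative only, not part of the original source: a sketch of a spec that mixes in the
// helpers above; the class name and sample stream contents are assumptions.
class StreamCheckingUsageSketch extends Specification with StreamChecking {
  "valueStreamMustHave" should {
    "accept a stream holding exactly the expected values" in {
      valueStreamMustHave(Stream("a", "b"), "a", "b")
    }
  }
}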
| themillhousegroup/edn-scala | src/test/scala/com/themillhousegroup/edn/test/StreamChecking.scala | Scala | gpl-2.0 | 701 |
/*
* slibexif - Scala library to parse JPEG EXIF data.
* Copyright (C) Niklas Grossmann
*
* This file is part of libexif.
*
* slibexif is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* net.n12n.exif is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with libexif. If not, see <http://www.gnu.org/licenses/>.
*/
package net.n12n.exif
import ByteOrder._
trait TypedTag[T] extends Tag {
val marker: Int
val name: String
def value(attr: IfdAttribute, order: ByteOrder): T
}
private[exif] class ValueTag[T : TypeConverter](marker: Int, name: String) extends TagImpl(marker, name) with TypedTag[T] {
def value(attr: IfdAttribute, order: ByteOrder): T = {
val genType = implicitly[TypeConverter[T]]
genType.toScala(attr, order).head
}
}
private[exif] class ListTag[T : TypeConverter](marker: Int, name: String) extends TagImpl(marker, name) with TypedTag[List[T]] {
def value(attr: IfdAttribute, order: ByteOrder): List[T] = {
val converter = implicitly[TypeConverter[T]]
converter.toScala(attr, order)
}
}
class ByteTag(marker: Int, name: String) extends ValueTag[ByteSeq](marker, name)
class UndefinedTag(marker: Int, name: String) extends ValueTag[Undefined](marker, name)
class AsciiTag(marker: Int, name: String) extends ValueTag[String](marker, name)
class LongListTag(marker: Int, name: String) extends ListTag[Long](marker, name)
class LongTag(marker: Int, name: String) extends ValueTag[Long](marker, name)
class ShortListTag(marker: Int, name: String) extends ListTag[Int](marker, name)
class ShortTag(marker: Int, name: String) extends ValueTag[Int](marker, name)
class RationalListTag(marker: Int, name: String) extends ListTag[Rational](marker, name)
class RationalTag(marker: Int, name: String) extends ValueTag[Rational](marker, name)
class SignedRationalTag(marker: Int, name: String) extends ValueTag[SignedRational](marker, name)
class NumericListTag(marker: Int, name: String) extends ListTag[Numeric](marker, name)
class NumericTag(marker: Int, name: String) extends ValueTag[Numeric](marker, name)
class UserCommentTag(marker: Int, name: String) extends ValueTag[MultiByteString](marker, name)
| ngrossmann/slibexif | src/main/scala/net/n12n/exif/TypedTag.scala | Scala | gpl-3.0 | 2,615 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.infer
import cc.factorie.directed._
import cc.factorie.la.{DenseTensor1, Tensor, Outer1Tensor2}
import cc.factorie
import cc.factorie.variable._
import cc.factorie.model.{DotFamily, Family, Factor}
import scala.Some
/** The result of inference: a collection of Marginal objects.
@author Andrew McCallum */
trait Summary {
/** The collection of all univariate Marginals available in this Summary */
def marginals: Iterable[Marginal1]
/** All the variables for which this Summary has a univariate Marginal. */
def variables: Iterable[Var] = marginals.map(_._1)
/** If this Summary has a univariate Marginal for variable v, return it; otherwise return null. */
def marginal(v:Var): Marginal1
/** If this Summary has a Marginal that touches all or a subset of the neighbors of this factor
return the Marginal with the maximally-available subset. */
def marginal(factor:Factor): FactorMarginal
def factorMarginals: Iterable[FactorMarginal]
def logZ: Double
/** If this summary has a univariate Marginal for variable v, return it in an Option; otherwise return None. */
def getMarginal(v:Var): Option[Marginal1] = { val m = marginal(v); if (m eq null) None else Some(m) }
def setToMaximize(implicit d:DiffList): Unit = marginals.foreach(_.setToMaximize(d)) // Note that order may matter here if Marginals are not Marginal1 and overlap with each other!
}
/*
trait IncrementableInfer[-A,-B] extends Infer[A,B] {
override def infer(variables: A, model: B): Option[WarmStartSummary]
}
trait WarmStartSummary {
def updateScores(v: DiscreteVar, t: Tensor): Unit
def updateSummary(): Unit
}
case class ModelWithInference[M](model: M, variables: Seq[DiscreteVar], inference: IncrementableInfer[Seq[DiscreteVar], M])
object InferByDualDecomposition extends Infer[Seq[ModelWithInference[_]],Seq[(Any,Any,DiscreteVar,DiscreteVar)]] {
def infer(variables: Seq[ModelWithInference[_]], model: Seq[(Any, Any, DiscreteVar, DiscreteVar)]) = null
}
*/
/** A Summary that can be used to gather weighted samples into its Marginals. */
// TODO Consider the relationship between this and Accumulator
// TODO Consider removing this
trait IncrementableSummary extends Summary {
def incrementCurrentValues(weight:Double): Unit
}
/** A Summary that contains multiple Marginals of type M, each a marginal distribution over a single variable. */
class Summary1[V<:Var,M<:Marginal1] extends Summary {
protected val _marginals = new scala.collection.mutable.HashMap[V,M]
protected val _factorMarginals = new scala.collection.mutable.HashMap[Factor,FactorMarginal]
def marginals = _marginals.values
def factorMarginals = _factorMarginals.values
def logZ = throw new Error("logZ is not defined")
def marginal(v:Var): M = _marginals(v.asInstanceOf[V]) // We don't actually care that this type check does nothing because only Vs could be added to the HashMap
def marginal(f:Factor): FactorMarginal = _factorMarginals(f)
def +=(marginal:M) = {
val vars = marginal.variables
require(vars.size == 1)
val v = vars.head.asInstanceOf[V]
if (_marginals.contains(v)) throw new Error("Marginal already present for variable "+v)
_marginals(v) = marginal
}
def +=(marginal:FactorMarginal) = {
if (_factorMarginals.contains(marginal.factor)) throw new Error("Marginal already present for factor "+marginal.factor)
_factorMarginals(marginal.factor) = marginal
}
}
/** A Summary containing only one Marginal. */
class SingletonSummary[M<:Marginal1](val marginal:M) extends Summary {
def marginals = Seq(marginal)
def marginal(v:Var): M = if (Seq(v) == marginal.variables) marginal else null.asInstanceOf[M]
def marginal(f:Factor) = null
def logZ = throw new Error("logZ not definable for SingletonSummary")
def factorMarginals = Nil
}
/** A Summary with all its probability on one variable-value Assignment. */
class AssignmentSummary(val assignment:Assignment) extends Summary {
lazy val _marginals = assignment.variables.map(v=> v -> new Marginal1 {
def _1 = v
def setToMaximize(implicit d: DiffList) = v match { case vv:MutableVar => vv.set(assignment(vv)) }
}).toMap
def marginals = _marginals.values
def marginal(v:Var): Marginal1 = _marginals.getOrElse(v, null)
def marginal(f:Factor): FactorMarginal = null.asInstanceOf[FactorMarginal]
override def setToMaximize(implicit d:DiffList): Unit = assignment.setVariables(d)
def logZ = throw new Error("AssignmentSummary does not define logZ")
def factorMarginals = Nil
}
/** A Summary with all its probability on one variable-value Assignment,
and which can also be used for learning because it includes factors. */
class MAPSummary(val mapAssignment: Assignment, factors: Seq[Factor]) extends Summary {
/** The collection of all Marginals available in this Summary */
class SingletonMarginal(val _1: Var) extends Marginal1 {
def setToMaximize(implicit d: DiffList) { _1 match { case v: MutableVar => v.set(mapAssignment(v)) } }
}
val marginals = mapAssignment.variables.map(new SingletonMarginal(_))
def marginal(v: Var) = mapAssignment.get(v) match {
case Some(_) => new SingletonMarginal(v)
case None => null
}
trait FactorMarginalWithScore extends FactorMarginal { val score: Double }
class SingletonFactorMarginal(val factor: Factor) extends FactorMarginalWithScore {
val tensorStatistics = factor.assignmentStatistics(mapAssignment).asInstanceOf[Tensor]
def variables = factor.variables
val score = factor.assignmentScore(mapAssignment)
}
class NonMarginalFactorMarginal(val factor: Factor) extends FactorMarginalWithScore {
val tensorStatistics = new DenseTensor1(1)
def variables = factor.variables
val score = factor.assignmentScore(mapAssignment)
}
def marginal(factor: Factor): FactorMarginalWithScore =
if (factor.isInstanceOf[Family#Factor] && factor.asInstanceOf[Family#Factor].family.isInstanceOf[DotFamily])
new SingletonFactorMarginal(factor)
else new NonMarginalFactorMarginal(factor)
def factorMarginals = factors.map(marginal)
def logZ = factors.map(marginal(_).score).sum
}
/** A summary with a separate Proportions distribution for each of its DiscreteVars */
// TODO Consider renaming FullyFactorizedDiscreteSummary or IndependentDiscreteSummary or PerVariableDiscreteSummary
// TODO Consider making this inherit from Summary1
class DiscreteSummary1[V<:DiscreteVar] extends IncrementableSummary {
def this(vs:Iterable[V]) = { this(); ++=(vs) }
//val variableClass = m.erasure
val _marginals1 = new scala.collection.mutable.HashMap[V,SimpleDiscreteMarginal1[V]]
def marginals = _marginals1.values
override def variables = _marginals1.keys
lazy val variableSet = variables.toSet
def marginal(v1:Var): SimpleDiscreteMarginal1[V] = _marginals1.getOrElse(v1.asInstanceOf[V], null)
def marginal2(vs:Var*): DiscreteMarginal = vs match {
    case Seq(v:V @unchecked) => _marginals1(v) // Note, this doesn't actually check for a type match on V, because of erasure, but it shouldn't matter
// case Seq(v:V @unchecked, w:V @unchecked) => new DiscreteMarginal2[V,V](v, w, new DenseTensorProportions2(new Outer1Tensor2(_marginals1(v).proportions,_marginals1(w).proportions), false))
case _ => null
}
def marginal(f:Factor): FactorMarginal = null
def +=(m:SimpleDiscreteMarginal1[V]): Unit = _marginals1(m._1) = m
def +=(v:V): Unit = this += new SimpleDiscreteMarginal1(v, null) // but not yet initialized marginal proportions
def ++=(vs:Iterable[V]): Unit = vs.foreach(+=(_))
//def ++=(ms:Iterable[DiscreteMarginal1[V]]): Unit = ms.foreach(+=(_))
def incrementCurrentValues(weight:Double): Unit = for (m <- marginals) m.incrementCurrentValue(weight)
//def maximize(implicit d:DiffList): Unit = for (m <- marginals) m._1.asInstanceOf[DiscreteVariable].set(m.proportions.maxIndex)
def factorMarginals = Nil
def logZ = throw new Error("DiscreteSummary1 does not define logZ")
}
| hlin117/factorie | src/main/scala/cc/factorie/infer/Summary.scala | Scala | apache-2.0 | 8,721 |
package dotty.tools.dotc.config
import PathResolver.Defaults
class ScalaSettings extends Settings.SettingGroup {
protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")
/** Path related settings.
*/
val bootclasspath = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath)
val extdirs = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs)
val javabootclasspath = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath)
val javaextdirs = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs)
val sourcepath = PathSetting("-sourcepath", "Specify location(s) of source files.", "") // Defaults.scalaSourcePath
/** Other settings.
*/
val dependencyfile = StringSetting("-dependencyfile", "file", "Set dependency tracking file.", ".scala_dependencies")
val deprecation = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.")
val encoding = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
val explaintypes = BooleanSetting("-explaintypes", "Explain type errors in more detail.")
val feature = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val g = ChoiceSetting("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
val help = BooleanSetting("-help", "Print a synopsis of standard options")
val nowarn = BooleanSetting("-nowarn", "Generate no warnings.")
val print = BooleanSetting("-print", "Print program with Scala-specific features removed.")
val target = ChoiceSetting("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "jvm-1.8", "msil"),
"jvm-1.8")
val unchecked = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.")
val verbose = BooleanSetting("-verbose", "Output messages about what the compiler is doing.")
val version = BooleanSetting("-version", "Print product version and exit.")
val pageWidth = IntSetting("-pagewidth", "Set page width", 80)
val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "")
val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
val argfiles = BooleanSetting("@<file>", "A text file containing compiler arguments (options and source files)")
val classpath = PathSetting("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = StringSetting("-d", "directory|jar", "destination for generated classfiles.", ".")
val nospecialization = BooleanSetting("-no-specialization", "Ignore @specialize annotations.")
val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
/** -X "Advanced" settings
*/
val Xhelp = BooleanSetting("-X", "Print a synopsis of advanced options.")
val assemname = StringSetting("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
val assemrefs = StringSetting("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
val assemextdirs = StringSetting("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
val sourcedir = StringSetting("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
val checkInit = BooleanSetting("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
val noassertions = BooleanSetting("-Xdisable-assertions", "Generate no assertions or assumptions.")
// val elidebelow = IntSetting("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
// elidable.MINIMUM, None, elidable.byName get _)
val noForwarders = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
val genPhaseGraph = StringSetting("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
val XminImplicitSearchDepth = IntSetting("-Xmin-implicit-search-depth", "Set number of levels of implicit searches undertaken before checking for divergence.", 5)
val logImplicitConv = BooleanSetting("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
val logFreeTerms = BooleanSetting("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, 72 to 255)
val Xmigration = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.")
val Xsource = VersionSetting("-Xsource", "Treat compiler input as Scala source for the specified version.")
val Xnojline = BooleanSetting("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
val showPlugins = BooleanSetting("-Xplugin-list", "Print a synopsis of loaded plugins.")
val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
val pluginsDir = StringSetting("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
val Xprint = PhasesSetting("-Xprint", "Print out program after")
val writeICode = PhasesSetting("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting("-Xprint-pos", "Print tree positions, as offsets.")
val printtypes = BooleanSetting("-Xprint-types", "Print tree types (debugging option).")
val prompt = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).")
val resident = BooleanSetting("-Xresident", "Compiler stays resident: read source filenames from standard input.")
val script = StringSetting("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
val mainClass = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
val Xshowcls = StringSetting("-Xshow-class", "class", "Show internal representation of class.", "")
val Xshowobj = StringSetting("-Xshow-object", "object", "Show internal representation of object.", "")
val showPhases = BooleanSetting("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
val XoldPatmat = BooleanSetting("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
val XnoPatmatAnalysis = BooleanSetting("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
/** -Y "Private" settings
*/
val overrideObjects = BooleanSetting("-Yoverride-objects", "Allow member objects to be overridden.")
val overrideVars = BooleanSetting("-Yoverride-vars", "Allow vars to be overridden.")
val Yhelp = BooleanSetting("-Y", "Print a synopsis of private options.")
val browse = PhasesSetting("-Ybrowse", "Browse the abstract syntax tree after")
val Ycheck = PhasesSetting("-Ycheck", "Check the tree at the end of")
val YcheckMods = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync")
  val YcheckTypedTrees = BooleanSetting("-YcheckTypedTrees", "Check all constructed typed trees for type correctness")
val Yshow = PhasesSetting("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
val Xcloselim = BooleanSetting("-Yclosure-elim", "Perform closure elimination.")
val Ycompacttrees = BooleanSetting("-Ycompact-trees", "Use compact tree printer when displaying trees.")
val noCompletion = BooleanSetting("-Yno-completion", "Disable tab-completion in the REPL.")
val Xdce = BooleanSetting("-Ydead-code", "Perform dead code elimination.")
val debug = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.")
val debugNames = BooleanSetting("-YdebugNames", "Show name-space indicators when printing names")
val debugTrace = BooleanSetting("-Ydebug-trace", "Trace core operations")
val debugFlags = BooleanSetting("-Ydebug-flags", "Print all flags of definitions")
val debugOwners = BooleanSetting("-Ydebug-owners", "Print all owners of definitions (requires -Yprint-syms)")
//val doc = BooleanSetting ("-Ydoc", "Generate documentation")
val termConflict = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
val inline = BooleanSetting("-Yinline", "Perform inlining when possible.")
val inlineHandlers = BooleanSetting("-Yinline-handlers", "Perform exception handler inlining when possible.")
  val YinlinerWarnings = BooleanSetting("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
val Xlinearizer = ChoiceSetting("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
val log = PhasesSetting("-Ylog", "Log operations during")
val Ylogcp = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.")
val Ynogenericsig = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
val noimports = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
val nopredef = BooleanSetting("-Yno-predef", "Compile without importing Predef.")
val noAdaptedArgs = BooleanSetting("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
  val selfInAnnots = BooleanSetting("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
val Xshowtrees = BooleanSetting("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
val XshowtreesCompact = BooleanSetting("-Yshow-trees-compact", "(Requires -Xprint:) Print detailed ASTs in compact form.")
val XshowtreesStringified = BooleanSetting("-Yshow-trees-stringified", "(Requires -Xprint:) Print stringifications along with detailed ASTs.")
val Yshowsyms = BooleanSetting("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
val Yshowsymkinds = BooleanSetting("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val Yskip = PhasesSetting("-Yskip", "Skip")
val Ygenjavap = StringSetting("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting("-Yno-squeeze", "Disable creation of compact code in matching.")
val YstopAfter = PhasesSetting("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val YstopBefore = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully
val refinementMethodDispatch = ChoiceSetting("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
val Yrangepos = BooleanSetting("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
val Yreplsync = BooleanSetting("-Yrepl-sync", "Do not use asynchronous code for repl startup")
  val YmethodInfer = BooleanSetting("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
val Yinvalidate = StringSetting("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
  val YshowSuppressedErrors = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.")
val Yheartbeat = BooleanSetting("-Yheartbeat", "show heartbeat stack trace of compiler operations.")
val Yprintpos = BooleanSetting("-Yprintpos", "show tree positions.")
val YnoDeepSubtypes = BooleanSetting("-Yno-deep-subtypes", "throw an exception on deep subtyping call stacks.")
val YprintSyms = BooleanSetting("-Yprint-syms", "when printing trees print info in symbols instead of corresponding info in trees.")
def stop = YstopAfter
/** Area-specific debug output.
*/
val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.")
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
val Yexplainlowlevel = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.")
val YnoDoubleBindings = BooleanSetting("-YnoDoubleBindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).")
val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize"
/** IDE-specific settings
*/
val YpresentationVerbose = BooleanSetting("-Ypresentation-verbose", "Print information about presentation compiler tasks.")
val YpresentationDebug = BooleanSetting("-Ypresentation-debug", "Enable debugging output for the presentation compiler.")
val YpresentationStrict = BooleanSetting("-Ypresentation-strict", "Do not report type errors in sources with syntax errors.")
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
} | AlexSikia/dotty | src/dotty/tools/dotc/config/ScalaSettings.scala | Scala | bsd-3-clause | 17,225 |
package scodec
package codecs
import scalaz.\/
import scodec.bits.BitVector
class FloatCodecTest extends CodecSuite {
"the float codec" should { "roundtrip" in { forAll { (n: Float) => roundtrip(float, n) } } }
"the floatL codec" should { "roundtrip" in { forAll { (n: Float) => roundtrip(floatL, n) } } }
"the float codecs" should {
"support endianness correctly" in {
forAll { (n: Float) =>
floatL.decodeValidValue(float.encodeValid(n).reverseByteOrder) shouldBe n
float.decodeValidValue(floatL.encodeValid(n).reverseByteOrder) shouldBe n
}
}
"return an error when decoding with too few bits" in {
      float.decode(BitVector.low(8)) shouldBe \/.left(Err.insufficientBits(32, 8))
}
}
}
| danielwegener/scodec | src/test/scala/scodec/codecs/FloatCodecTest.scala | Scala | bsd-3-clause | 747 |
package mvp.volvo.tlv.tlv
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import TlvParser._
case class TlvParser(tag: Int, length: Int, value: Array[Byte],
children: ArrayBuffer[TlvParser] = ArrayBuffer[TlvParser](),
config: TlvConfig = TlvConfig()) {
parse()
private def parse(): Unit = {
var i = 0
val endIndex = length - 2 * config.size
while (i < endIndex) {
val tag = getNext
val length = getNext
try {
val tlv = new TlvParser(tag, length, nextValue(i, length, value, config))
children += tlv
i += tlv.length
} catch {
case e: Throwable => throw new TlvException(s"Incorrect tlv structure: $this", value, e)
}
}
def getNext: Int = {
try {
val n = next(i, value, config)
i += config.size
n
} catch {
case e: IndexOutOfBoundsException =>
throw new TlvException(s"Cannot get data by offset: $i, tlv structure: $this", value, e)
}
}
}
override def toString: String = {
import spray.json._
import scala.collection.JavaConverters._
    val source = "{" +
      " \"tag\":" + tag +
      ", \"length\":" + length +
      ", \"value\":\"" + value.map("%02X" format _).mkString(" ") + "\"" +
      ", \"children\":" + children.toList.asJava +
"}"
source.parseJson.prettyPrint
}
}
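// Illustrative usage sketch, not part of the original source: the object name and the sample
// hex string are assumptions, and they presume the default TlvConfig reads 2-byte big-endian
// tag and length fields.
object TlvParserUsageSketch {
  // One outer TLV (tag 0x0001, length 6 bytes) wrapping a single child TLV (tag 0x0002, length 2).
  def demo(): TlvParser = TlvParser("0001 0006 0002 0002 CAFE")
}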
object TlvParser {
def apply(valueHex: String): TlvParser = {
try {
val data = valueHex
.replaceAll(" ", "")
.replaceAll("\\n", "")
.replaceAll("\\t", "")
.trim
.sliding(2, 2).toArray.map(v => Integer.parseInt(v, 16)).map(_.toByte)
TlvParser(data, TlvConfig())
} catch {
case e: NumberFormatException => throw new TlvException("Not valid HEX data", Array.empty, e)
}
}
def apply(value: Array[Byte], config: TlvConfig): TlvParser = {
try {
val tag = next(0, value, config)
val length = next(config.size, value, config)
val v = nextValue(2 * config.size, length, value, config)
new TlvParser(tag, length, v, config = config)
} catch {
case e: TlvException => throw e
case e: Throwable => throw new TlvException(s"Incorrect tlv structure in top", value, e)
}
}
private def next(offset: Int, value: Array[Byte], config: TlvConfig) = {
ByteBuffer.wrap(value, offset, config.size).order(config.order).getShort()
}
private def nextValue(offset: Int, length: Int, value: Array[Byte], config: TlvConfig) : Array[Byte] = {
val nextValue = new Array[Byte](length)
System.arraycopy(value, offset, nextValue, 0, length)
nextValue
}
} | vaaction/tlv-parser | src/main/scala/mvp/volvo/tlv/tlv/TlvParser.scala | Scala | gpl-3.0 | 2,697 |
package com.textteaser.summarizer
import opennlp.tools.sentdetect._
import com.google.inject.Inject
import com.google.common.base.{CharMatcher, Splitter}
import scala.collection.JavaConverters
class Parser @Inject() (sentenceDetector: SentenceDetectorME, stopWordList: StopWords, config: Config) {
val ideal = config.words.ideal
lazy val stopWords = stopWordList.stopWords
/*
* Sentence Length: Computed using this formula
* (ideal - Math.abs(ideal - words.size)) / ideal
*/
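  // Worked example (assuming an ideal of 20 words): a 15-word sentence scores
  // 1 - |20 - 15| / 20 = 0.75, falling off linearly as the length drifts from the ideal.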
def sentenceLength(sentence: Array[String]) = 1 - (Math.abs(ideal - sentence.size) / ideal.toDouble)
/*
* Split Words: Split words via white space and new lines. Then remove whites space in the resulting array.
*/
def splitWords(source: String) = JavaConverters.iterableAsScalaIterableConverter(
    Splitter.on("""[^\w]""".r.pattern)
.trimResults().omitEmptyStrings()
.split(source)).asScala.toArray
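  // Counts the sentence's non-stopword tokens that also appear in the title and
  // normalises by the number of title words.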
def titleScore(titleWords: Array[String], sentence: Array[String]) =
sentence.count(w => !stopWords.contains(w) && titleWords.contains(w)) / titleWords.size.toDouble
def getKeywords(text: String): KeywordList = {
val keyWords = splitWords(text)
val sizeWithRepeatingWords = keyWords.length
KeywordList(
keyWords.filterNot(w => stopWords.contains(w))
.groupBy(w => w)
.map(w => ArticleKeyword(w._1, w._2.length))
.toList.sortBy(-_.count),
sizeWithRepeatingWords)
}
def splitSentences(source: String) = sentenceDetector.sentDetect(source)
def sentencePosition(ctr: Int, sentenceCount: Double) = {
val normalized = ctr / sentenceCount
if(normalized > 1.0)
0d
else if (normalized > 0.9)
0.15
else if (normalized > 0.8)
0.04
else if (normalized > 0.7)
0.04
else if (normalized > 0.6)
0.06
else if (normalized > 0.5)
0.04
else if (normalized > 0.4)
0.05
else if (normalized > 0.3)
0.08
else if (normalized > 0.2)
0.14
else if (normalized > 0.1)
0.23
else if (normalized > 0)
0.17
    else
      0d
}
}
case class ArticleKeyword(word: String, count: Int)
case class KeywordList(keywords: List[ArticleKeyword], wordCount: Int)
| ahmadassaf/Text-Teaser | src/main/scala/com/textteaser/summarizer/Parser.scala | Scala | mit | 2,210 |
/*
* Copyright (C) 2015 Holmes Team at HUAWEI Noah's Ark Lab.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.streamdm.core
/**
* A Model trait defines the needed operations on any learning Model. It
* provides a method for updating the model.
*/
trait Model extends Serializable {
type T <: Model
/**
* Update the model, depending on the Instance given for training.
*
* @param change the example based on which the Model is updated
* @return the updated Model
*/
def update(change: Example): T
}
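/**
 * Illustrative only, not part of the original file: a minimal sketch of a Model
 * implementation. The class name is an assumption; a real learner would fold the
 * Example into its sufficient statistics rather than returning itself unchanged.
 */
class NoOpModel extends Model {
  type T = NoOpModel
  override def update(change: Example): NoOpModel = this
}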
| gosubpl/akka-online | src/main/scala/org/apache/spark/streamdm/core/Model.scala | Scala | apache-2.0 | 1,065 |
package es.uvigo.ei.sing.sds
package controller
import play.api.libs.json.Json
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.mvc._
import entity._
import database._
object KeywordsController extends Controller {
lazy val keywordsDAO = new KeywordsDAO
def get(id: Keyword.ID): Action[AnyContent] =
Action.async(keywordsDAO.get(id) map {
_.map(k => Ok(Json.toJson(k))).getOrElse(NotFound(Json.obj("err" -> "Keyword not found")))
})
}
| agjacome/smart-drug-search | src/main/scala/controller/KeywordsController.scala | Scala | mit | 493 |
/**
* Copyright (C) 2019 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.util
import enumeratum.EnumEntry.Lowercase
import enumeratum.{CirceEnum, Enum, EnumEntry}
import StringUtils._
// NOTE: We place this in a separate module also to help with Circe issues, see:
// https://github.com/circe/circe/issues/639
sealed trait Modifier extends EnumEntry with Lowercase
object Modifier extends Enum[Modifier] with CirceEnum[Modifier] {
val values = findValues
case object Shift extends Modifier
case object Ctrl extends Modifier
case object Alt extends Modifier
case object Meta extends Modifier
def parseStringToSet(s: String): Set[Modifier] = {
s.splitTo[Set]() map (_.toLowerCase) map {
case "control" => "ctrl"
case "option" => "alt"
case "command" => "meta"
case other => other
} map
withNameLowercaseOnly
}
}
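// Illustrative only, not part of the original source: a small sketch of how parseStringToSet
// is meant to be used; the object name and sample input are assumptions.
object ModifierUsageSketch {
  // "Control Shift" normalises "control" to "ctrl" and should yield Set(Modifier.Ctrl, Modifier.Shift).
  def demo(): Set[Modifier] = Modifier.parseStringToSet("Control Shift")
}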
| orbeon/orbeon-forms | common/shared/src/main/scala/org/orbeon/oxf/util/Modifier.scala | Scala | lgpl-2.1 | 1,500 |
/*
* ProbEvidenceBeliefPropagation.scala
* A belief propagation algorithm.
*
* Created By: Brian Ruttenberg ([email protected])
* Creation Date: Jan 15, 2015
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.factored.beliefpropagation
import scala.Option.option2Iterable
import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.language._
import com.cra.figaro.util._
import annotation.tailrec
import com.cra.figaro.algorithm.OneTimeProbQuery
import com.cra.figaro.algorithm.ProbQueryAlgorithm
import com.cra.figaro.algorithm.factored.factors._
import com.cra.figaro.algorithm.factored.factors.factory._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.algorithm.sampling.ProbEvidenceSampler
import com.cra.figaro.language.Element
import com.cra.figaro.language.Universe
import com.cra.figaro.algorithm.lazyfactored.LazyValues
import com.cra.figaro.algorithm.lazyfactored.BoundedProbFactor
import scala.collection.mutable.Map
trait ProbEvidenceBeliefPropagation extends ProbabilisticBeliefPropagation with ProbEvidenceAlgorithm {
private def logFcn: (Double => Double) = (semiring: DivideableSemiRing[Double]) match {
case LogSumProductSemiring() => (d: Double) => d
case SumProductSemiring() => (d: Double) => if (d == semiring.zero) Double.NegativeInfinity else math.log(d)
}
private def probFcn: (Double => Double) = (semiring: DivideableSemiRing[Double]) match {
case LogSumProductSemiring() => (d: Double) => if (d == semiring.zero) 0 else math.exp(d)
case SumProductSemiring() => (d: Double) => d
}
private def entropy(probFactor: Factor[Double], logFactor: Factor[Double]): Double = {
// Even though the variables in each factor are the same, the order of the vars might be different
val logFactorMapping = probFactor.variables.map(v => logFactor.variables.indexOf(v))
def remap(l: List[Int]) = l.zipWithIndex.map(s => (s._1, logFactorMapping(s._2))).sortBy(_._2).unzip._1
val e = (0.0 /: probFactor.getIndices)((c: Double, i: List[Int]) => {
val p = probFcn(probFactor.get(i))
if (p == 0) c else c + p * logFcn(logFactor.get(remap(i)))
})
e
}
/**
* Compute the evidence of the model. Returns the probability of evidence on the model. This assumes that BP
* has already been run on this algorithm instance.
*/
def computedResult(): Double = {
val factorNodes = factorGraph.getNodes.filter(_.isInstanceOf[FactorNode]).toList
val varNodes = factorGraph.getNodes.filter(_.isInstanceOf[VariableNode]).toList
val nonZeroEvidence = factorNodes.exists(p => beliefMap(p).contents.exists(_._2 != Double.NegativeInfinity))
if (nonZeroEvidence) {
val betheEnergy = -1 * factorNodes.map(f => {
entropy(normalize(beliefMap(f)), factorGraph.getFactorForNode(f.asInstanceOf[FactorNode]))
}).sum
val betheEntropy = {
val factorEntropy = -1 * factorNodes.map(f => {
entropy(normalize(beliefMap(f)), normalize(beliefMap(f)))
}).sum
val varEntropy = varNodes.map(v => {
(factorGraph.getNeighbors(v).size - 1) * entropy(normalize(beliefMap(v)), normalize(beliefMap(v)))
}).sum
factorEntropy + varEntropy
}
math.exp(-1 * (betheEnergy - betheEntropy)) / denominator
} else {
0.0
}
}
}
/**
* Trait for One Time BP evidence algorithms.
*/
trait OneTimeProbEvidenceBeliefPropagation extends OneTimeProbabilisticBeliefPropagation with OneTimeProbEvidence with ProbEvidenceBeliefPropagation {
def additionalEvidenceAlgorithm(evidence: List[NamedEvidence[_]]): ProbEvidenceAlgorithm = {
val myIterations = this.iterations
val myResult = computedResult
val myEvidence = evidence
new ProbQueryBeliefPropagation(universe, universe.activeElements: _*)(
List(),
(u: Universe, e: List[NamedEvidence[_]]) => () => ProbEvidenceSampler.computeProbEvidence(10000, e)(u)) with OneTimeProbEvidenceBeliefPropagation with OneTimeProbQuery {
val iterations = myIterations
override val denominator = myResult
override val evidence = myEvidence
}
}
}
object ProbEvidenceBeliefPropagation {
/**
* Creates a One Time belief propagation computer in the current default universe.
*/
def apply(myIterations: Int, evidence: List[NamedEvidence[_]])(implicit universe: Universe) = {
val baseline = new ProbQueryBeliefPropagation(universe, universe.activeElements:_*)(
List(),
(u: Universe, e: List[NamedEvidence[_]]) => () => ProbEvidenceSampler.computeProbEvidence(10000, e)(u))
with OneTimeProbabilisticBeliefPropagation with OneTimeProbQuery with OneTimeProbEvidenceBeliefPropagation { val iterations = myIterations }
baseline.start
baseline.probAdditionalEvidence(evidence)
}
/**
   * Use one-time belief propagation to compute the probability of the given named evidence.
* Takes the conditions and constraints in the model as part of the model definition.
* This method takes care of creating and running the necessary algorithms.
*/
def computeProbEvidence(myIterations: Int, evidence: List[NamedEvidence[_]])(implicit universe: Universe): Double = {
val alg1 = apply(myIterations, List())
alg1.start()
val alg2 = alg1.probAdditionalEvidence(evidence)
alg1.kill()
alg2.start()
val result = alg2.probEvidence
alg2.kill()
result
}
}
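// Rough usage sketch (the element name, observation and iteration count below are
// made up for illustration):
//
//   val coin = Flip(0.3)("coin", universe)
//   val prob = ProbEvidenceBeliefPropagation.computeProbEvidence(
//     100, List(NamedEvidence("coin", Observation(true))))
//   // prob should come out close to 0.3 for this one-element model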
| jyuhuan/figaro | Figaro/src/main/scala/com/cra/figaro/algorithm/factored/beliefpropagation/ProbEvidenceBeliefPropagation.scala | Scala | bsd-3-clause | 5,640 |
trait A {
def p: Int
def getP = p
}
trait B extends A {
def p: Int = 22
}
class C extends B {
private def p: Int = 23 // error
}
@main def Test =
C().getP // would crash with a duplicate method error if the private C#p was permitted
| som-snytt/dotty | tests/neg/i7926c.scala | Scala | apache-2.0 | 246 |
package com.github.kczulko.isc.dhcp.model
trait Item
| kczulko/isc-dhcp-leases-parser | src/main/scala/com/github/kczulko/isc/dhcp/model/Item.scala | Scala | apache-2.0 | 54 |
package little_server
object Main {
def main(args:Array[String]){
AsyncServer().start(8091)
//SyncServer().startHttpServer(8091)
}
}
| deathnik/little_server | src/main/scala/little_server/Main.scala | Scala | gpl-3.0 | 147 |
package maliki.interface
import upickle.default._
object Encode {
def apply(n: Node): String = {
write(List(n))
}
def apply(c: Command): String = {
write(List(c))
}
def apply(cms: List[Command]): String = {
write(cms)
}
}
object DecodeCommands {
def apply(s: String): List[Command] = {
read[List[Command]](s)
}
}
object Decode {
def apply(s: String): List[Node] = {
read[List[Node]](s)
}
}
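// Rough round-trip sketch (the actual Command and Node case classes are defined
// elsewhere in this package; `someCommand` is a placeholder):
//
//   val wire = Encode(someCommand)       // pickled as a one-element JSON array
//   val back = DecodeCommands(wire).head // read it back on the receiving side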
| jamesreinke/Lola-Beta | src/main/scala/interface/Pickle.scala | Scala | mit | 422 |
package org.jetbrains.plugins.scala.lang.transformation
package general
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScTuple
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaCode._
import org.jetbrains.plugins.scala.project.ProjectContext
/**
* @author Pavel Fatin
*/
class ExpandTupleInstantiation extends AbstractTransformer {
override protected def transformation(implicit project: ProjectContext): PartialFunction[PsiElement, Unit] = {
case e @ ScTuple(exprs) =>
e.replace(code"Tuple${exprs.length}(${@@(exprs)})")
}
}
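// Effect of the transformation on small examples (source-level sketch):
//
//   (1, "a")  is rewritten to  Tuple2(1, "a")
//   (x, y, z) is rewritten to  Tuple3(x, y, z)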
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/transformation/general/ExpandTupleInstantiation.scala | Scala | apache-2.0 | 591 |
package com.socrata.soql.brita
@deprecated("Use AsciiIdentifierFilter instead", "1.1.0")
object LegacyBrita {
def apply(xs: Iterable[String]) = AsciiIdentifierFilter(xs)
def apply(x: String) = AsciiIdentifierFilter(x)
}
| socrata-platform/soql-brita | src/main/scala/com/socrata/soql/brita/LegacyBrita.scala | Scala | apache-2.0 | 225 |
/*
* sFOparserTest.scala
*
*/
package at.logic.gapt.proofs.shlk
import java.io.InputStreamReader
import at.logic.gapt.formats.shlk_parsing.sFOParser
import at.logic.gapt.expr._
import at.logic.gapt.expr.schema._
import at.logic.gapt.proofs.lk._
import org.specs2.execute.Success
import org.specs2.mutable._
class sFOparserTest extends Specification {
sequential
"sFOparser" should {
"parse correctly a FO SLK-proof" in {
val var3 = SchemaAtom( Var( "x3", To ), Nil )
val var4 = SchemaAtom( Var( "x4", To ), Nil )
val ax1 = Axiom( var3 :: Nil, var3 :: Nil )
val ax2 = Axiom( var4 :: Nil, var4 :: Nil )
val negl = NegLeftRule( ax1, var3 )
val proof = OrLeftRule( negl, ax2, var3, var4 )
val A0 = IndexedPredicate( "A", IntZero() )
val i = IntVar( "i" )
val Ai2 = IndexedPredicate( "A", Succ( Succ( i ) ) )
val Ai = IndexedPredicate( "A", Succ( i ) )
val f1 = And( A0, BigAnd( i, Ai, IntZero(), Succ( i ) ) )
val ax11 = Axiom( A0 :: Nil, A0 :: Nil )
val s = new InputStreamReader( getClass.getClassLoader.getResourceAsStream( "sIND.lks" ) )
val map = sFOParser.parseProof( s )
def f = Const( "f", Ti -> Ti )
def h = Const( "h", ( Tindex -> ( Ti -> Ti ) ) )
def g = Const( "g", ( Tindex -> ( Ti -> Ti ) ) )
val k = IntVar( "k" )
val x = foVar( "x" )
val base2 = x
val step2 = foTerm( "f", sTerm( g, Succ( k ), x :: Nil ) :: Nil )
val base1 = sTerm( g, IntZero(), x :: Nil )
val step1 = sTerm( g, Succ( k ), x :: Nil )
dbTRS.clear
dbTRS.add( g, Tuple2( base1, base2 ), Tuple2( step1, step2 ) )
Success()
}
"parse correctly the journal example" in {
val var3 = SchemaAtom( Var( "x3", To ), Nil )
val var4 = SchemaAtom( Var( "x4", To ), Nil )
val ax1 = Axiom( var3 :: Nil, var3 :: Nil )
val ax2 = Axiom( var4 :: Nil, var4 :: Nil )
val negl = NegLeftRule( ax1, var3 )
val proof = OrLeftRule( negl, ax2, var3, var4 )
val A0 = IndexedPredicate( "A", IntZero() )
val i = IntVar( "i" )
val Ai2 = IndexedPredicate( "A", Succ( Succ( i ) ) )
val Ai = IndexedPredicate( "A", Succ( i ) )
val f1 = And( A0, BigAnd( i, Ai, IntZero(), Succ( i ) ) )
val ax11 = Axiom( A0 :: Nil, A0 :: Nil )
val s = new InputStreamReader( getClass.getClassLoader.getResourceAsStream( "shlk-journal_example.lks" ) )
val map = sFOParser.parseProof( s )
def f = Const( "f", Ti -> Ti )
def h = Const( "h", ( Tindex -> ( Ti -> Ti ) ) )
def g = Const( "g", ( Tindex -> ( Ti -> Ti ) ) )
val k = IntVar( "k" )
val x = foVar( "x" )
val base2 = x
val step2 = foTerm( "f", sTerm( g, Succ( k ), x :: Nil ) :: Nil )
val base1 = sTerm( g, IntZero(), x :: Nil )
val step1 = sTerm( g, Succ( k ), x :: Nil )
dbTRS.clear
dbTRS.add( g, Tuple2( base1, base2 ), Tuple2( step1, step2 ) )
Success()
}
}
}
| loewenheim/gapt | src/test/scala/at/logic/gapt/proofs/shlk/sFOparserTest.scala | Scala | gpl-3.0 | 3,001 |
package com.twitter.finagle.thrift
import org.jboss.netty.buffer.ChannelBuffer
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
class DuplexChannelBufferTransportTest extends FunSuite with MockitoSugar {
class DuplexChannelContext {
val in: ChannelBuffer = mock[ChannelBuffer]
val out: ChannelBuffer = mock[ChannelBuffer]
val t = new DuplexChannelBufferTransport(in, out)
}
val bb = "hello".getBytes
test("DuplexChannelBufferTransport writes to the output ChannelBuffer") {
val c = new DuplexChannelContext
import c._
t.write(bb, 0, 1)
verify(out).writeBytes(bb, 0, 1)
t.write(bb, 1, 2)
verify(out).writeBytes(bb, 1, 2)
t.write(bb, 0, 5)
    verify(out).writeBytes(bb, 0, 5)
}
test("DuplexChannelBufferTransport reads from the input ChannelBuffer") {
val c = new DuplexChannelContext
import c._
val nReadable = 5
when(in.readableBytes).thenReturn(nReadable)
val b = new Array[Byte](nReadable)
assert(t.read(b, 0, 10) == nReadable)
assert(t.read(b, 0, 3) == 3)
}
}
| sveinnfannar/finagle | finagle-thrift/src/test/scala/com/twitter/finagle/thrift/DuplexChannelBufferTransportTest.scala | Scala | apache-2.0 | 1,207 |
//
// $Id$
//
// Wiggle - a 2D game development library - http://code.google.com/p/wiggle/
// Copyright 2008-2010 Michael Bayne
// Distributed under the "Simplified BSD License" in LICENSE.txt
package wiggle.gfx
import org.lwjgl.opengl.GL11
import wiggle.util.Mutator
/**
* A visual element, something rendered to the screen.
*/
abstract class Element
{
/** This element's x position. */
var x :Float = 0
/** This element's y position. */
var y :Float = 0
/** This element's horizontal scale. */
var xscale :Float = 1
/** This element's vertical scale. */
var yscale :Float = 1
/** This element's orientation, in degrees (blame OpenGL). */
var orient :Float = 0
/** This element's orientation in radians. */
def orientR :Float = math.Pi.toFloat * orient / 180f
/** Returns an option on this element's parent. */
def parent :Option[Group] = _parent
/** Returns a mutator for our x position. */
def xM = new Mutator {
override protected def apply = x
override protected def update (value :Float) {
x = value
}
}
/** Returns a mutator for our y position. */
def yM = new Mutator {
override protected def apply = y
override protected def update (value :Float) {
y = value
}
}
/** Returns a mutator for our x scale. */
def xscaleM = new Mutator {
override protected def apply = xscale
override protected def update (value :Float) {
xscale = value
}
}
/** Returns a mutator for our y scale. */
def yscaleM = new Mutator {
override protected def apply = yscale
override protected def update (value :Float) {
yscale = value
}
}
/** Returns a mutator for our orientation. */
def orientM = new Mutator {
override protected def apply = orient
override protected def update (value :Float) {
orient = value
}
}
/** Positions this element at the specified coordinates. */
def move (nx :Float, ny :Float) {
x = nx
y = ny
}
/** Sets up our transforms and renders this element. */
def render (rend :Renderer, time :Float) {
GL11.glPushMatrix
if (x != 0 || y != 0) {
GL11.glTranslatef(x, y, 0f)
}
if (orient != 0) {
GL11.glRotatef(orient, 0f, 0f, 1f)
}
// TODO: scale
try {
renderElement(rend, time)
} finally {
GL11.glPopMatrix
}
}
/** Called once the transforms are set up to render this element. */
protected def renderElement (rend :Renderer, time :Float)
/** Called by {@link Group} when we are added to or removed from it. */
private[gfx] def setParent (parent :Option[Group]) {
_parent match {
case Some(parent) => parent.remove(this)
case None => // noop
}
_parent = parent
}
/** A reference to our parent in the display hierarchy. */
private[this] var _parent :Option[Group] = None
}
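// Hypothetical usage sketch (assumes some concrete Element subclass, here called Sprite):
//
//   val s = new Sprite()
//   s.move(100f, 50f)    // position in screen coordinates
//   s.orient = 45f       // degrees, as noted above
//   val xMutator = s.xM  // Mutator over the x position, handy for animations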
| zdevzee/wiggle | src/main/scala/wiggle/gfx/Element.scala | Scala | bsd-3-clause | 2,863 |
// See LICENSE for license details.
package sifive.blocks.devices.chiplink
import Chisel.{defaultCompileOptions => _, _}
import freechips.rocketchip.util.CompileOptions.NotStrictInferReset
import freechips.rocketchip.tilelink._
class SinkE(info: ChipLinkInfo) extends Module
{
val io = new Bundle {
val e = Decoupled(new TLBundleE(info.edgeIn.bundle)).flip
val q = Decoupled(new DataLayer(info.params))
// Find the sink from D
val d_tlSink = Valid(UInt(width = info.params.sinkBits))
val d_clSink = UInt(INPUT, width = info.params.clSinkBits)
}
io.d_tlSink.valid := io.e.fire()
io.d_tlSink.bits := io.e.bits.sink
val header = info.encode(
format = UInt(4),
opcode = UInt(0),
param = UInt(0),
size = UInt(0),
domain = UInt(0),
source = io.d_clSink)
io.e.ready := io.q.ready
io.q.valid := io.e.valid
io.q.bits.last := Bool(true)
io.q.bits.data := header
io.q.bits.beats := UInt(1)
}
| sifive/sifive-blocks | src/main/scala/devices/chiplink/SinkE.scala | Scala | apache-2.0 | 955 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import java.io._
import kafka.message._
import kafka.log._
import kafka.utils._
import collection.mutable
import joptsimple.OptionParser
object DumpLogSegments {
def main(args: Array[String]) {
val parser = new OptionParser
val printOpt = parser.accepts("print-data-log", "if set, printing the messages content when dumping data logs")
val verifyOpt = parser.accepts("verify-index-only", "if set, just verify the index log without printing its content")
val filesOpt = parser.accepts("files", "REQUIRED: The comma separated list of data and index log files to be dumped")
.withRequiredArg
.describedAs("file1, file2, ...")
.ofType(classOf[String])
val maxMessageSizeOpt = parser.accepts("max-message-size", "Size of largest message.")
.withRequiredArg
.describedAs("size")
.ofType(classOf[java.lang.Integer])
.defaultsTo(5 * 1024 * 1024)
val deepIterationOpt = parser.accepts("deep-iteration", "if set, uses deep instead of shallow iteration")
val options = parser.parse(args : _*)
if(!options.has(filesOpt)) {
System.err.println("Missing required argument \"" + filesOpt + "\"")
parser.printHelpOn(System.err)
System.exit(1)
}
val print = if(options.has(printOpt)) true else false
val verifyOnly = if(options.has(verifyOpt)) true else false
val files = options.valueOf(filesOpt).split(",")
val maxMessageSize = options.valueOf(maxMessageSizeOpt).intValue()
val isDeepIteration = if(options.has(deepIterationOpt)) true else false
val misMatchesForIndexFilesMap = new mutable.HashMap[String, List[(Long, Long)]]
val nonConsecutivePairsForLogFilesMap = new mutable.HashMap[String, List[(Long, Long)]]
for(arg <- files) {
val file = new File(arg)
if(file.getName.endsWith(Log.LogFileSuffix)) {
println("Dumping " + file)
dumpLog(file, print, nonConsecutivePairsForLogFilesMap, isDeepIteration)
} else if(file.getName.endsWith(Log.IndexFileSuffix)) {
println("Dumping " + file)
dumpIndex(file, verifyOnly, misMatchesForIndexFilesMap, maxMessageSize)
}
}
misMatchesForIndexFilesMap.foreach {
case (fileName, listOfMismatches) => {
System.err.println("Mismatches in :" + fileName)
listOfMismatches.foreach(m => {
System.err.println(" Index offset: %d, log offset: %d".format(m._1, m._2))
})
}
}
nonConsecutivePairsForLogFilesMap.foreach {
      case (fileName, listOfNonConsecutivePairs) => {
        System.err.println("Non-consecutive offsets in :" + fileName)
        listOfNonConsecutivePairs.foreach(m => {
System.err.println(" %d is followed by %d".format(m._1, m._2))
})
}
}
}
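  // Example invocation (paths are illustrative only):
  //
  //   bin/kafka-run-class.sh kafka.tools.DumpLogSegments \
  //     --files /tmp/kafka-logs/my-topic-0/00000000000000000000.log \
  //     --print-data-log --deep-iteration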
/* print out the contents of the index */
private def dumpIndex(file: File,
verifyOnly: Boolean,
misMatchesForIndexFilesMap: mutable.HashMap[String, List[(Long, Long)]],
maxMessageSize: Int) {
val startOffset = file.getName().split("\\.")(0).toLong
val logFileName = file.getAbsolutePath.split("\\.")(0) + Log.LogFileSuffix
val logFile = new File(logFileName)
val messageSet = new FileMessageSet(logFile)
val index = new OffsetIndex(file = file, baseOffset = startOffset)
for(i <- 0 until index.entries) {
val entry = index.entry(i)
val partialFileMessageSet: FileMessageSet = messageSet.read(entry.position, maxMessageSize)
val messageAndOffset = getIterator(partialFileMessageSet.head, isDeepIteration = true).next()
if(messageAndOffset.offset != entry.offset + index.baseOffset) {
var misMatchesSeq = misMatchesForIndexFilesMap.getOrElse(file.getName, List[(Long, Long)]())
misMatchesSeq ::=(entry.offset + index.baseOffset, messageAndOffset.offset)
misMatchesForIndexFilesMap.put(file.getName, misMatchesSeq)
}
// since it is a sparse file, in the event of a crash there may be many zero entries, stop if we see one
if(entry.offset == 0 && i > 0)
return
if (!verifyOnly)
println("offset: %d position: %d".format(entry.offset + index.baseOffset, entry.position))
}
}
/* print out the contents of the log */
private def dumpLog(file: File,
printContents: Boolean,
nonConsecutivePairsForLogFilesMap: mutable.HashMap[String, List[(Long, Long)]],
isDeepIteration: Boolean) {
val startOffset = file.getName().split("\\.")(0).toLong
println("Starting offset: " + startOffset)
val messageSet = new FileMessageSet(file)
var validBytes = 0L
    var lastOffset = -1L
for(shallowMessageAndOffset <- messageSet) { // this only does shallow iteration
val itr = getIterator(shallowMessageAndOffset, isDeepIteration)
for (messageAndOffset <- itr) {
val msg = messageAndOffset.message
if(lastOffset == -1)
lastOffset = messageAndOffset.offset
// If we are iterating uncompressed messages, offsets must be consecutive
else if (msg.compressionCodec == NoCompressionCodec && messageAndOffset.offset != lastOffset +1) {
var nonConsecutivePairsSeq = nonConsecutivePairsForLogFilesMap.getOrElse(file.getName, List[(Long, Long)]())
nonConsecutivePairsSeq ::=(lastOffset, messageAndOffset.offset)
nonConsecutivePairsForLogFilesMap.put(file.getName, nonConsecutivePairsSeq)
}
lastOffset = messageAndOffset.offset
print("offset: " + messageAndOffset.offset + " position: " + validBytes + " isvalid: " + msg.isValid +
" payloadsize: " + msg.payloadSize + " magic: " + msg.magic +
" compresscodec: " + msg.compressionCodec + " crc: " + msg.checksum)
if(msg.hasKey)
print(" keysize: " + msg.keySize)
if(printContents) {
if(msg.hasKey)
print(" key: " + Utils.readString(messageAndOffset.message.key, "UTF-8"))
print(" payload: " + Utils.readString(messageAndOffset.message.payload, "UTF-8"))
}
println()
}
validBytes += MessageSet.entrySize(shallowMessageAndOffset.message)
}
val trailingBytes = messageSet.sizeInBytes - validBytes
if(trailingBytes > 0)
println("Found %d invalid bytes at the end of %s".format(trailingBytes, file.getName))
}
private def getIterator(messageAndOffset: MessageAndOffset, isDeepIteration: Boolean) = {
if (isDeepIteration) {
val message = messageAndOffset.message
message.compressionCodec match {
case NoCompressionCodec =>
getSingleMessageIterator(messageAndOffset)
case _ =>
ByteBufferMessageSet.decompress(message).iterator
}
} else
getSingleMessageIterator(messageAndOffset)
}
private def getSingleMessageIterator(messageAndOffset: MessageAndOffset) = {
new IteratorTemplate[MessageAndOffset] {
var messageIterated = false
override def makeNext(): MessageAndOffset = {
if (!messageIterated) {
messageIterated = true
messageAndOffset
} else
allDone()
}
}
}
}
| archieco/kafka | core/src/main/scala/kafka/tools/DumpLogSegments.scala | Scala | apache-2.0 | 8,207 |
package monocle.function
import monocle.{Iso, Lens}
import scala.annotation.implicitNotFound
/**
* Typeclass that defines a [[Lens]] from an `S` to its fourth element of type `A`
* @tparam S source of [[Lens]]
* @tparam A target of [[Lens]], `A` is supposed to be unique for a given `S`
*/
@implicitNotFound("Could not find an instance of Field4[${S},${A}], please check Monocle instance location policy to " +
"find out which import is necessary")
abstract class Field4[S, A] extends Serializable {
def fourth: Lens[S, A]
}
trait Field4Functions {
def fourth[S, A](implicit ev: Field4[S, A]): Lens[S, A] = ev.fourth
}
object Field4 extends Field4Functions {
/** lift an instance of [[Field4]] using an [[Iso]] */
def fromIso[S, A, B](iso: Iso[S, A])(implicit ev: Field4[A, B]): Field4[S, B] = new Field4[S, B] {
val fourth: Lens[S, B] = iso composeLens ev.fourth
}
/************************************************************************************************/
/** Std instances */
/************************************************************************************************/
implicit def tuple4Field4[A1, A2, A3, A4]: Field4[(A1, A2, A3, A4), A4] = new Field4[(A1, A2, A3, A4), A4] {
val fourth = Lens((_: (A1, A2, A3, A4))._4)(a => t => t.copy(_4 = a))
}
implicit def tuple5Field4[A1, A2, A3, A4, A5]: Field4[(A1, A2, A3, A4, A5), A4] = new Field4[(A1, A2, A3, A4, A5), A4] {
val fourth = Lens((_: (A1, A2, A3, A4, A5))._4)(a => t => t.copy(_4 = a))
}
implicit def tuple6Field4[A1, A2, A3, A4, A5, A6]: Field4[(A1, A2, A3, A4, A5, A6), A4] = new Field4[(A1, A2, A3, A4, A5, A6), A4] {
val fourth = Lens((_: (A1, A2, A3, A4, A5, A6))._4)(a => t => t.copy(_4 = a))
}
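  // A small usage sketch (tuple values are arbitrary):
  //
  //   fourth[(Int, Int, Int, Int), Int].get((1, 2, 3, 4))     == 4
  //   fourth[(Int, Int, Int, Int), Int].set(40)((1, 2, 3, 4)) == (1, 2, 3, 40)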
} | rperry/Monocle | core/shared/src/main/scala/monocle/function/Field4.scala | Scala | mit | 1,814 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.datastream
import com.typesafe.config.Config
trait StormStreamExecutor[R <: EagleTuple] extends FlatMapper[Seq[AnyRef], R] {
def prepareConfig(config : Config)
def init
def fields : Array[String]
}
trait JavaStormStreamExecutor[R <: EagleTuple] extends FlatMapper[java.util.List[AnyRef], R] {
def prepareConfig(config : Config)
def init
def fields : Array[String]
override def toString() = this.getClass.getSimpleName
}
abstract class StormStreamExecutor1[T0] extends StormStreamExecutor[Tuple1[T0]] {
override def fields = Array("f0")
}
abstract class JavaStormStreamExecutor1[T0] extends JavaStormStreamExecutor[Tuple1[T0]] {
override def fields = Array("f0")
}
abstract class StormStreamExecutor2[T0, T1] extends StormStreamExecutor[Tuple2[T0, T1]] {
override def fields = Array("f0", "f1")
}
abstract class JavaStormStreamExecutor2[T0, T1] extends JavaStormStreamExecutor[Tuple2[T0, T1]] {
override def fields = Array("f0", "f1")
}
abstract class StormStreamExecutor3[T0, T1, T2] extends StormStreamExecutor[Tuple3[T0, T1, T2]] {
override def fields = Array("f0", "f1", "f2")
}
abstract class JavaStormStreamExecutor3[T0, T1, T2] extends JavaStormStreamExecutor[Tuple3[T0, T1, T2]] {
override def fields = Array("f0", "f1", "f2")
}
abstract class StormStreamExecutor4[T0, T1, T2, T3] extends StormStreamExecutor[Tuple4[T0, T1, T2, T3]] {
override def fields = Array("f0", "f1", "f2", "f3")
}
abstract class JavaStormStreamExecutor4[T0, T1, T2, T3] extends JavaStormStreamExecutor[Tuple4[T0, T1, T2, T3]] {
override def fields = Array("f0", "f1", "f2", "f3")
} | eBay/Eagle | eagle-core/eagle-data-process/eagle-stream-process-base/src/main/scala/org/apache/eagle/datastream/StormStreamExecutor.scala | Scala | apache-2.0 | 2,436 |
package stdlib
/* If this is taking too much time, then it should be replaced
* by a compile time map.
*/
import byteR._
import exceptions.ICE
import frontend.ASTType
object StandardLibraries {
val packages: List[LibraryPackage] = List(
STDMath,
STDReal,
STDTime, STDTimer)
def apply(names: List[String]): Option[ASTType] = {
val validPackages = packages map (prefixMatch(names, _))
val validFunctions = validPackages map (_.map{
case(name, pack) => pack.apply(name) })
val resultTypes = validFunctions filter (x => x != None && x != Some(None))
if (resultTypes.length == 0) {
None
} else if (resultTypes.length == 1) {
resultTypes(0).get
} else {
throw new ICE("""The standard library identifier %s is ambiguous"""
.format(names.mkString(".")))
}
}
def loadExpressionFor(names: List[String]) = {
val classFor = JVMClassRef.classRefFor("cmlc/lib/" + names.mkString("/"))
List(
new JVMNew(classFor),
new JVMDup(),
new JVMInvokeSpecialMethod(
new JVMMethodRef(classFor, "<init>", List(), JVMVoidPrimitiveType())))
}
private def prefixMatch(names: List[String], pack: LibraryPackage):
Option[(List[String], LibraryPackage)] = {
for (acceptedPrefix <- pack.prefixesAccepted) {
if (names.length > acceptedPrefix.length) {
if (names.take(acceptedPrefix.length) == acceptedPrefix)
return Some(names.drop(acceptedPrefix.length), pack)
}
}
None
}
}
| j-c-w/mlc | src/main/scala/stdlib/StandardLibraries.scala | Scala | gpl-3.0 | 1,523 |
package lila.round
import akka.actor._
import lila.hub.actorApi.game.ChangeFeatured
import lila.hub.actorApi.round.MoveEvent
import lila.socket.Socket.makeMessage
import play.api.libs.iteratee._
import play.api.libs.json._
private final class TvBroadcast extends Actor {
context.system.lilaBus.subscribe(self, 'moveEvent, 'changeFeaturedGame)
override def postStop() {
context.system.lilaBus.unsubscribe(self)
}
private val (enumerator, channel) = Concurrent.broadcast[JsValue]
private var featuredId = none[String]
def receive = {
case TvBroadcast.GetEnumerator => sender ! enumerator
case ChangeFeatured(id, msg) =>
featuredId = id.some
channel push msg
case move: MoveEvent if Some(move.gameId) == featuredId =>
channel push makeMessage("fen", Json.obj(
"fen" -> move.fen,
"lm" -> move.move
))
}
}
object TvBroadcast {
type EnumeratorType = Enumerator[JsValue]
case object GetEnumerator
}
| bjhaid/lila | modules/round/src/main/TvBroadcast.scala | Scala | mit | 979 |
/*
* Twitter Korean Text - Scala library to process Korean text
*
* Copyright 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.penguin.korean.tokenizer
import com.twitter.penguin.korean.tokenizer.KoreanTokenizer.KoreanToken
import com.twitter.penguin.korean.tokenizer.ParsedChunk._
import com.twitter.penguin.korean.util.KoreanDictionaryProvider._
import com.twitter.penguin.korean.util.KoreanPos._
object ParsedChunk {
val suffixes = Set(Suffix, Eomi, Josa, PreEomi)
val preferredBeforeHaVerb = Set(Noun, ProperNoun, VerbPrefix)
}
/**
* A candidate parse for a chunk.
*
* @param posNodes Sequence of KoreanTokens.
* @param words Number of words in this candidate parse.
*/
case class ParsedChunk(posNodes: Seq[KoreanToken], words: Int,
profile: TokenizerProfile = TokenizerProfile.defaultProfile) {
// Using lazy val to cache the score
lazy val score = countTokens * profile.tokenCount +
countUnknowns * profile.unknown +
words * profile.wordCount +
getUnknownCoverage * profile.unknownCoverage +
getFreqScore * profile.freq +
countPos(Unknown) * profile.unknownPosCount +
isExactMatch * profile.exactMatch +
isAllNouns * profile.allNoun +
isPreferredPattern * profile.preferredPattern +
countPos(Determiner) * profile.determinerPosCount +
countPos(Exclamation) * profile.exclamationPosCount +
isInitialPostPosition * profile.initialPostPosition +
isNounHa * profile.haVerb +
hasSpaceOutOfGuide * profile.spaceGuidePenalty
def countUnknowns = this.posNodes.count { p: KoreanToken => p.unknown }
def countTokens = this.posNodes.size
def isInitialPostPosition = if (suffixes.contains(this.posNodes.head.pos)) 1 else 0
def isExactMatch = if (this.posNodes.size == 1) 0 else 1
def hasSpaceOutOfGuide = if (profile.spaceGuide.isEmpty) {
0
} else {
this.posNodes
.filter{p: KoreanToken => !suffixes.contains(p.pos)}
.count {
p: KoreanToken => !profile.spaceGuide.contains(p.offset)
}
}
def isAllNouns = if (this.posNodes.exists(
t => t.pos != Noun && t.pos != ProperNoun)) 1
else 0
def isPreferredPattern = if (
posNodes.size == 2 && profile.preferredPatterns.contains(posNodes.map(_.pos))
) 0
else 1
def isNounHa = if (this.posNodes.size >= 2
&& preferredBeforeHaVerb.contains(this.posNodes.head.pos)
&& this.posNodes(1).pos == Verb
&& this.posNodes(1).text.startsWith("하")) 0
else 1
def posTieBreaker = this.posNodes.map(_.pos.id).sum
def getUnknownCoverage = this.posNodes.foldLeft(0) {
case (sum, p: KoreanToken) => if (p.unknown) sum + p.text.length else sum
}
def getFreqScore = this.posNodes.foldLeft(0f) {
case (output: Float, p: KoreanToken) if p.pos == Noun || p.pos == ProperNoun =>
output + (1f - koreanEntityFreq.getOrElse(p.text, 0f))
case (output: Float, p: KoreanToken) => output + 1.0f
} / this.posNodes.size
def ++(that: ParsedChunk) = {
ParsedChunk(this.posNodes ++ that.posNodes, this.words + that.words, profile)
}
def countPos(pos: KoreanPos) = this.posNodes.count { p: KoreanToken => p.pos == pos }
} | nlpenguin/twitter-korean-text | src/main/scala/com/twitter/penguin/korean/tokenizer/ParsedChunk.scala | Scala | apache-2.0 | 3,726 |
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.batch
import net.liftweb.actor.LiftActor
import net.liftweb.actor.LAPinger
import net.liftweb.common._
import net.liftweb.util.Helpers
import org.joda.time.DateTime
import com.normation.rudder.domain.logger.ApplicationLogger
// -----------------------------------------------------
// Constants and private objects and classes
// -----------------------------------------------------
//Message to send to the updater manager to start a new update of all dynamic groups
case object StartUpdate
sealed trait UpdaterStates //states into which the updater process can be
//the process is idle
case object IdleUpdater extends UpdaterStates
//an update is currently running for the given nodes
case class StartProcessing(id:Long, started: DateTime) extends UpdaterStates
//the process gave a result
case class UpdateResult[T](id:Long, start: DateTime, end:DateTime, result: Box[T]) extends UpdaterStates
/**
* An abstract batch scheduler periodically executing some service.
*
* It works with three parts so that it can handle long running I/O
* process:
* - a pinger used as a scheduler
 * - a StatusManager which keeps the state of the task to do: currently running,
 *   idle, etc.
 * - a TaskProcessor which actually executes the task (and which may not be
 *   able to answer incoming messages for a long time)
*
* Moreover, some initialisation logic is added so that the scheduling
* interval may be configured externally with some guards.
*/
trait AbstractScheduler extends Loggable {
type T
val schedulerMinimumIntervalTime = 1
val schedulerMaximumIntervalTime = 300
def updateInterval: Int // in seconds
val executeTask: Long => Box[T]
def displayName : String
def propertyName : String
// -----------------------------------------------------
// Start batch
// -----------------------------------------------------
logger.trace(s"***** starting [${displayName}] scheduler *****")
(new StatusManager) ! StartUpdate
////////////////////////////////////////////////////////////////
//////////////////// implementation details ////////////////////
////////////////////////////////////////////////////////////////
// -----------------------------------------------------
/*
* Two actor utility class: one that manage the status
* (respond to ping, to status command, etc)
* one that actually process update.
*/
// -----------------------------------------------------
private class StatusManager extends LiftActor {
updateManager =>
val logger = ApplicationLogger
private var updateId = 0L
private var currentState: UpdaterStates = IdleUpdater
private var onePending = false
private var realUpdateInterval = {
if(updateInterval < schedulerMinimumIntervalTime) {
logger.warn(s"Value '${updateInterval}' for ${propertyName} is too small for [${displayName}] scheduler interval, using '${schedulerMinimumIntervalTime}'")
schedulerMinimumIntervalTime
} else {
if(updateInterval > schedulerMaximumIntervalTime) {
logger.warn(s"Value '${updateInterval}' for ${propertyName} is too big for [${displayName}] scheduler interval, using '${schedulerMaximumIntervalTime}'")
schedulerMaximumIntervalTime
} else {
logger.info(s"Starting [${displayName}] scheduler with a period of ${updateInterval} s")
updateInterval
}
}
}
override protected def messageHandler = {
// --------------------------------------------
// Ask for a new process
// --------------------------------------------
case StartUpdate =>
currentState match {
case IdleUpdater =>
logger.debug(s"[${displayName}] Scheduled task starting")
updateId = updateId + 1
TaskProcessor ! StartProcessing(updateId, new DateTime)
case _ : StartProcessing if(!onePending) =>
logger.trace(s"Add a pending task for [${displayName}] scheduler")
onePending = true
case _ =>
logger.warn(s"[${displayName}] Scheduled task NOT started: another task is still processing, ignoring")
}
// --------------------------------------------
// Process a successful update response
// --------------------------------------------
case UpdateResult(id,start,end,result) =>
logger.trace(s"Get result for [${displayName}] scheduler task's id '${id}'")
currentState = IdleUpdater
        //if one update is pending, immediately start another one
        //schedule the next update (interval is in seconds)
LAPinger.schedule(this, StartUpdate, realUpdateInterval*1000)
//log some information
val format = "yyyy/MM/dd HH:mm:ss"
result match {
case e:EmptyBox =>
val error = (e ?~! s"Error when executing [${displayName}] scheduler task started at ${start.toString(format)}, ended at ${end.toString(format)}.")
logger.error(error.messageChain)
case Full(x) =>
val executionTime = end.getMillis() - start.getMillis()
logger.debug(s"[${displayName}] Scheduled task finished in ${executionTime} ms (started at ${start.toString(format)}, finished at ${end.toString(format)})")
if (executionTime >= updateInterval*1000) {
ApplicationLogger.warn(s"[${displayName}] Task frequency is set too low! Last task took ${executionTime} ms but tasks are scheduled every ${updateInterval*1000} ms. Adjust ${propertyName} if this problem persists.")
}
}
// --------------------------------------------
// Unexpected messages
// --------------------------------------------
case x => logger.debug(s"[${displayName}] scheduler don't know how to process message: '${x}'")
}
private[this] object TaskProcessor extends LiftActor {
override protected def messageHandler = {
// --------------------------------------------
// Process a start process
// --------------------------------------------
case StartProcessing(processId, startTime) => {
logger.trace(s"[${displayName}] scheduler: start a new task with id: '${processId}' on date ${startTime}")
try {
val result = executeTask(processId)
if (updateManager!=null)
updateManager ! UpdateResult(processId, startTime, new DateTime, result)
else this ! StartProcessing(processId, startTime)
} catch {
case e:Throwable => e match {
case x:ThreadDeath => throw x
case x:InterruptedException => throw x
case e => logger.error(e)
//updateManager ! UpdateResult(processId,startTime,new DateTime, Failure("Exception caught during update process.",Full(e), Empty))
throw e
}
}
}
// --------------------------------------------
// Unexpected messages
// --------------------------------------------
case x => logger.debug(s"[${displayName}] Don't know how to process message: '${x}'")
}
}
}
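  // A minimal sketch of a concrete scheduler (all names and the cleanup body are
  // hypothetical):
  //
  //   class CleanOldReportsScheduler extends AbstractScheduler {
  //     type T = Int
  //     override def updateInterval = 60 // seconds
  //     override val executeTask = (id: Long) => Full(doCleanup())
  //     override def displayName = "old reports cleaner"
  //     override def propertyName = "rudder.batch.reports.cleanInterval"
  //   }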
} | Kegeruneku/rudder | rudder-core/src/main/scala/com/normation/rudder/batch/AbstractScheduler.scala | Scala | agpl-3.0 | 8,864 |
package org.hibernate.cache.rediscala.utils
import java.util.Properties
import org.hibernate.SessionFactory
import org.hibernate.cache.rediscala.client.HibernateRedisCache
import org.hibernate.cfg.AvailableSettings
import org.hibernate.internal.SessionFactoryImpl
import org.slf4j.LoggerFactory
import redis.RedisClient
/**
* Hibernate-Redis Helper class
*
* @author 배성혁 [email protected]
 * @since 2014. 2. 21. 9:34 AM
*/
object HibernateRedisUtil {
implicit val akkaSystem = akka.actor.ActorSystem()
private lazy val log = LoggerFactory.getLogger(getClass)
val DEFAULT_PORT: Int = 6379
val DEFAULT_SENTINEL_PORT: Int = 26379
val DEFAULT_TIMEOUT: Int = 2000
val DEFAULT_DATABASE: Int = 1
val CHARSET: String = "UTF-8"
val EXPIRE_IN_SECONDS: String = "redis.expireInSeconds"
private var cacheProperties: Properties = _
/**
   * Creates a `HibernateRedisCache`.
   * @param props Hibernate property settings
*/
def createCacheClient(props: Properties): HibernateRedisCache = {
log.info("RedisClient 인스턴스를 생성합니다...")
val cachePath = props.getProperty(AvailableSettings.CACHE_PROVIDER_CONFIG, "hibernate-redis.properties")
cacheProperties = loadCacheProperties(cachePath)
if (cacheProperties != null) {
// val expiryInSeconds = Integer.decode(cacheProperties.getProperty("redis.expiryInSeconds", "0"))
val host = cacheProperties.getProperty("redis.host", "localhost")
val port = cacheProperties.getProperty("redis.port", String.valueOf(DEFAULT_PORT)).toInt
// val timeout = cacheProperties.getProperty("redis.timeout", String.valueOf(DEFAULT_TIMEOUT)).toInt
val passwd = cacheProperties.getProperty("redis.password", "")
val database = cacheProperties.getProperty("redis.database", String.valueOf(DEFAULT_DATABASE)).toInt
HibernateRedisCache(RedisClient(host, port, Some(passwd), Some(database)))
} else {
HibernateRedisCache()
}
}
def expireInSeconds(regionName: String): Int = {
val defaultValue = getCacheProperty(EXPIRE_IN_SECONDS, "0").toInt
expireInSeconds(regionName, defaultValue)
}
def expireInSeconds(regionName: String, defaultExpiry: Int): Int = {
if (cacheProperties == null)
defaultExpiry
else
getCacheProperty(EXPIRE_IN_SECONDS + "." + regionName, String.valueOf(defaultExpiry)).toInt
}
def getCacheProperty(name: String, defaultValue: String): String = {
if (cacheProperties == null)
return defaultValue
try {
cacheProperties.getProperty(name, defaultValue)
} catch {
case ignored: Throwable => defaultValue
}
}
def loadCacheProperties(cachePath: String): Properties = {
val cacheProps = new Properties()
try {
log.debug(s"Loading cache properties... cachePath=$cachePath")
val is = getClass.getClassLoader.getResourceAsStream(cachePath)
cacheProps.load(is)
log.debug(s"Load cache properties. cacheProps=$cacheProps")
} catch {
      case e: Throwable => log.warn(s"Failed to load the cache configuration. cachePath=$cachePath", e)
}
cacheProps
}
/**
* Returns an increasing unique value based on the System.currentTimeMillis()
* with some additional reserved space for a counter.
*/
def nextTimestamp(): Long = System.currentTimeMillis()
/**
* 엔티티의 cache regions name을 반환합니다.
*/
def getRegionName(sessionFactory: SessionFactory, entityClass: Class[_]): String = {
val p = sessionFactory.asInstanceOf[SessionFactoryImpl].getEntityPersister(entityClass.getName)
if (p != null && p.hasCache)
p.getCacheAccessStrategy.getRegion.getName
else
""
}
}
| debop/hibernate-rediscala | src/main/scala/org/hibernate/cache/rediscala/utils/HibernateRedisUtil.scala | Scala | apache-2.0 | 3,739 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.dmlc.mxnet
object Context {
val devtype2str = Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")
val devstr2type = Map("cpu" -> 1, "gpu" -> 2, "cpu_pinned" -> 3)
private var _defaultCtx = new Context("cpu", 0)
def defaultCtx: Context = _defaultCtx
def cpu(deviceId: Int = 0): Context = {
new Context("cpu", deviceId)
}
def gpu(deviceId: Int = 0): Context = {
new Context("gpu", deviceId)
}
implicit def ctx2Array(ctx: Context): Array[Context] = Array(ctx)
}
/**
* Constructing a context.
* @param deviceTypeName {'cpu', 'gpu'} String representing the device type
* @param deviceId (default=0) The device id of the device, needed for GPU
*/
class Context(deviceTypeName: String, val deviceId: Int = 0) extends Serializable {
val deviceTypeid: Int = Context.devstr2type(deviceTypeName)
def this(context: Context) = {
this(context.deviceType, context.deviceId)
}
def withScope[T](body: => T): T = {
val oldDefaultCtx = Context.defaultCtx
Context._defaultCtx = this
try {
body
} finally {
Context._defaultCtx = oldDefaultCtx
}
}
/**
* Return device type of current context.
* @return device_type
*/
def deviceType: String = Context.devtype2str(deviceTypeid)
override def toString: String = {
s"$deviceType($deviceId)"
}
override def equals(other: Any): Boolean = {
if (other != null && other.isInstanceOf[Context]) {
val otherInst = other.asInstanceOf[Context]
otherInst.deviceId == deviceId && otherInst.deviceTypeid == deviceTypeid
} else {
false
}
}
override def hashCode: Int = {
toString.hashCode
}
}
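// Usage sketch (device id is arbitrary):
//
//   val ctx = Context.gpu(0)
//   ctx.withScope {
//     // code in this block sees Context.defaultCtx == gpu(0)
//   }
//   // after the block the previous default context is restored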
| Mega-DatA-Lab/mxnet | scala-package/core/src/main/scala/ml/dmlc/mxnet/Context.scala | Scala | apache-2.0 | 2,463 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.spi.v2_3
import org.neo4j.cypher.internal.compiler.v2_3.spi.TokenContext
import org.neo4j.kernel.api.Statement
import org.neo4j.kernel.api.exceptions.{LabelNotFoundKernelException, PropertyKeyNotFoundException, RelationshipTypeNotFoundException}
import org.neo4j.kernel.impl.api.operations.KeyReadOperations
abstract class TransactionBoundTokenContext(protected var statement: Statement) extends TokenContext {
def getOptPropertyKeyId(propertyKeyName: String): Option[Int] = {
val propertyId: Int = statement.readOperations().propertyKeyGetForName(propertyKeyName)
if (propertyId == KeyReadOperations.NO_SUCH_PROPERTY_KEY) None
else Some(propertyId)
}
def getPropertyKeyId(propertyKeyName: String) = {
val propertyId: Int = statement.readOperations().propertyKeyGetForName(propertyKeyName)
if (propertyId == KeyReadOperations.NO_SUCH_PROPERTY_KEY)
throw new PropertyKeyNotFoundException("No such property.", null)
propertyId
}
def getPropertyKeyName(propertyKeyId: Int): String = statement.readOperations().propertyKeyGetName(propertyKeyId)
def getLabelId(labelName: String): Int = {
val labelId: Int = statement.readOperations().labelGetForName(labelName)
if (labelId == KeyReadOperations.NO_SUCH_LABEL)
throw new LabelNotFoundKernelException("No such label", null)
labelId
}
def getOptLabelId(labelName: String): Option[Int] = {
val labelId: Int = statement.readOperations().labelGetForName(labelName)
if (labelId == KeyReadOperations.NO_SUCH_LABEL) None
else Some(labelId)
}
def getLabelName(labelId: Int): String = statement.readOperations().labelGetName(labelId)
def getOptRelTypeId(relType: String): Option[Int] = {
val relTypeId: Int = statement.readOperations().relationshipTypeGetForName(relType)
if (relTypeId == KeyReadOperations.NO_SUCH_RELATIONSHIP_TYPE) None
else Some(relTypeId)
}
def getRelTypeId(relType: String): Int = {
val relTypeId: Int = statement.readOperations().relationshipTypeGetForName(relType)
if (relTypeId == KeyReadOperations.NO_SUCH_RELATIONSHIP_TYPE)
throw new RelationshipTypeNotFoundException("No such relationship.", null)
relTypeId
}
def getRelTypeName(id: Int): String = statement.readOperations().relationshipTypeGetName(id)
}
| HuangLS/neo4j | community/cypher/cypher/src/main/scala/org/neo4j/cypher/internal/spi/v2_3/TransactionBoundTokenContext.scala | Scala | apache-2.0 | 3,127 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
// this is copy/pasted from https://github.com/akka/akka/blob/5576c233d063b3ee4cfc05d8e73c614a3dea478d/project/CrossJava.scalas
package playbuild
import java.io.File
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
import sbt._
import sbt.Keys._
import sbt.librarymanagement.SemanticSelector
import sbt.librarymanagement.VersionNumber
/*
* Tools for discovering different Java versions,
* will be in sbt 1.3.0 (https://github.com/sbt/sbt/pull/4139 et al)
* but until that time replicated here
*/
case class JavaVersion(numbers: Vector[Long], vendor: Option[String]) {
def numberStr: String = numbers.mkString(".")
def withVendor(vendor: Option[String]) = copy(vendor = vendor)
def withVendor(vendor: String) = copy(vendor = Option(vendor))
def withNumbers(numbers: Vector[Long]) = copy(numbers = numbers)
override def toString: String = {
vendor.map(_ + "@").getOrElse("") + numberStr
}
}
object JavaVersion {
val specificationVersion: String = sys.props("java.specification.version")
val version: String = sys.props("java.version")
def isJdk8: Boolean =
VersionNumber(specificationVersion).matchesSemVer(SemanticSelector(s"=1.8"))
val isJdk11orHigher: Boolean =
VersionNumber(specificationVersion).matchesSemVer(SemanticSelector(">=11"))
def apply(version: String): JavaVersion = CrossJava.parseJavaVersion(version)
def apply(numbers: Vector[Long], vendor: String): JavaVersion = new JavaVersion(numbers, Option(vendor))
def notOnJdk8[T](values: Seq[T]): Seq[T] = if (isJdk8) Seq.empty[T] else values
def sourceAndTarget(fullJavaHome: Option[File]): Seq[String] = {
if (isJdk8) Nil
else {
val javaHome = fullJavaHome.getOrElse {
sys.error("Unable to identify a Java 8 home to specify the boot classpath")
}
Seq("-source", "8", "-target", "8", "-bootclasspath", s"$javaHome/jre/lib/rt.jar")
}
}
}
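// A few examples of accepted version strings (illustrative):
//
//   JavaVersion("[email protected]").toString == "[email protected]" // jabba-style vendor@version
//   JavaVersion("11").numbers == Vector(11L)          // plain version, no vendor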
object CrossJava {
// parses jabba style version number [email protected]
def parseJavaVersion(version: String): JavaVersion = {
def splitDot(s: String): Vector[Long] =
Option(s) match {
case Some(x) => x.split('.').toVector.filterNot(_ == "").map(_.toLong)
case _ => Vector()
}
def splitAt(s: String): Vector[String] =
Option(s) match {
case Some(x) => x.split('@').toVector
case _ => Vector()
}
splitAt(version) match {
case Vector(vendor, rest) => JavaVersion(splitDot(rest), Option(vendor))
case Vector(rest) => JavaVersion(splitDot(rest), None)
case _ => sys.error(s"Invalid JavaVersion: $version")
}
}
def discoverJavaHomes: ListMap[String, File] = {
ListMap(JavaDiscoverConfig.configs.flatMap { _.javaHomes }.sortWith(versionOrder): _*)
}
sealed trait JavaDiscoverConf {
def javaHomes: Vector[(String, File)]
}
def versionOrder(left: (_, File), right: (_, File)): Boolean =
versionOrder(left._2.getName, right._2.getName)
// Sort version strings, considering 1.8.0 < 1.8.0_45 < 1.8.0_121
@tailrec
def versionOrder(left: String, right: String): Boolean = {
val Pattern = """.*?([0-9]+)(.*)""".r
left match {
case Pattern(leftNumber, leftRest) =>
right match {
case Pattern(rightNumber, rightRest) =>
if (Integer.parseInt(leftNumber) < Integer.parseInt(rightNumber)) true
else if (Integer.parseInt(leftNumber) > Integer.parseInt(rightNumber)) false
else versionOrder(leftRest, rightRest)
case _ =>
false
}
case _ =>
true
}
}
object JavaDiscoverConfig {
private val JavaHomeDir = """(java-|jdk-?|adoptopenjdk-)(1\.)?([0-9]+).*""".r
class LinuxDiscoverConfig(base: File) extends JavaDiscoverConf {
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) => JavaVersion(nullBlank(m) + n).toString -> (base / dir)
}
}
class MacOsDiscoverConfig extends JavaDiscoverConf {
val base: File = file("/Library") / "Java" / "JavaVirtualMachines"
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) =>
JavaVersion(nullBlank(m) + n).toString -> (base / dir / "Contents" / "Home")
}
}
class WindowsDiscoverConfig extends JavaDiscoverConf {
val base: File = file("C://Program Files/Java")
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) => JavaVersion(nullBlank(m) + n).toString -> (base / dir)
}
}
// See https://github.com/shyiko/jabba
class JabbaDiscoverConfig extends JavaDiscoverConf {
val base: File = Path.userHome / ".jabba" / "jdk"
val JavaHomeDir = """([\w\-]+)\@(1\.)?([0-9]+).*""".r
def javaHomes: Vector[(String, File)] =
wrapNull(base.list()).collect {
case dir @ JavaHomeDir(_, m, n) =>
val v = JavaVersion(nullBlank(m) + n).toString
if ((base / dir / "Contents" / "Home").exists) v -> (base / dir / "Contents" / "Home")
else v -> (base / dir)
}
}
class JavaHomeDiscoverConfig extends JavaDiscoverConf {
def javaHomes: Vector[(String, File)] =
sys.env
.get("JAVA_HOME")
.map(new java.io.File(_))
.filter(_.exists())
.flatMap { javaHome =>
val base = javaHome.getParentFile
javaHome.getName match {
case dir @ JavaHomeDir(_, m, n) => Some(JavaVersion(nullBlank(m) + n).toString -> (base / dir))
case _ => None
}
}
.toVector
}
val configs = Vector(
new JabbaDiscoverConfig,
new LinuxDiscoverConfig(file("/usr") / "java"),
new LinuxDiscoverConfig(file("/usr") / "lib" / "jvm"),
new MacOsDiscoverConfig,
new WindowsDiscoverConfig,
new JavaHomeDiscoverConfig
)
}
def nullBlank(s: String): String =
if (s eq null) ""
else s
  // expand Java versions 1-20 to 1.x, and vice versa, to accept both "1.8" and "8"
private val oneDot = Map((1L to 20L).toVector.flatMap { i =>
Vector(Vector(i) -> Vector(1L, i), Vector(1L, i) -> Vector(i))
}: _*)
def expandJavaHomes(hs: Map[String, File]): Map[String, File] =
hs.flatMap {
case (k, v) =>
val jv = JavaVersion(k)
if (oneDot.contains(jv.numbers))
Vector(k -> v, jv.withNumbers(oneDot(jv.numbers)).toString -> v)
else Vector(k -> v)
}
def wrapNull(a: Array[String]): Vector[String] =
if (a eq null) Vector()
else a.toVector
}
| marcospereira/playframework | documentation/project/CrossJava.scala | Scala | apache-2.0 | 6,913 |
/*
* ScalaRay - Ray tracer based on pbrt (see http://pbrt.org) written in Scala
* Copyright (C) 2009, 2010, 2011 Jesper de Jong
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jesperdj.scalaray.scene
import org.jesperdj.scalaray.shape.BoundingBox
import org.jesperdj.scalaray.vecmath._
// Transformed primitive: wraps a primitive with a transform (pbrt 4.1.2)
final class TransformedPrimitive (primitive: Primitive, transform: Transform) extends Primitive {
private val inverse: Transform = transform.inverse
// Bounding box that contains the primitive
val boundingBox: BoundingBox = primitive.boundingBox(transform)
// Bounding box when primitive is transformed
override def boundingBox(tr: Transform): BoundingBox = primitive.boundingBox(tr * transform)
  // Compute the closest intersection between a ray and this primitive; returns the intersection and the distance of the intersection along the ray
def intersect(ray: Ray): Option[(Intersection, Double)] = primitive.intersect(inverse * ray) map {
case (its, distance) => (transform * its, distance)
}
// Check if a ray intersects this primitive
override def checkIntersect(ray: Ray): Boolean = primitive.checkIntersect(inverse * ray)
override def toString = "TransformedPrimitive(primitive=%s, transform=%s)" format (primitive, transform)
}
| jesperdj/scalaray | src/main/scala/org/jesperdj/scalaray/scene/TransformedPrimitive.scala | Scala | gpl-3.0 | 1,932 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation._
import leon.lang._
object Heaps {
/*~~~~~~~~~~~~~~~~~~~~~~~*/
/* Data type definitions */
/*~~~~~~~~~~~~~~~~~~~~~~~*/
private case class Node(rank : BigInt, elem : Int, nodes : Heap)
sealed abstract class Heap
private case class Nodes(head : Node, tail : Heap) extends Heap
private case object Empty extends Heap
sealed abstract class OptInt
case class Some(value : Int) extends OptInt
case object None extends OptInt
/*~~~~~~~~~~~~~~~~~~~~~~~*/
/* Abstraction functions */
/*~~~~~~~~~~~~~~~~~~~~~~~*/
def heapContent(h : Heap) : Set[Int] = h match {
case Empty => Set.empty[Int]
case Nodes(n, ns) => nodeContent(n) ++ heapContent(ns)
}
def nodeContent(n : Node) : Set[Int] = n match {
case Node(_, e, h) => Set(e) ++ heapContent(h)
}
/*~~~~~~~~~~~~~~~~~~~~~~~~*/
/* Helper/local functions */
/*~~~~~~~~~~~~~~~~~~~~~~~~*/
private def reverse(h : Heap) : Heap = reverse0(h, Empty)
private def reverse0(h : Heap, acc : Heap) : Heap = (h match {
case Empty => acc
case Nodes(n, ns) => reverse0(ns, Nodes(n, acc))
}) ensuring(res => heapContent(res) == heapContent(h) ++ heapContent(acc))
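  // Links two trees of equal rank: the tree with the larger root element becomes a child of the other.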
private def link(t1 : Node, t2 : Node) = (t1, t2) match {
case (Node(r, e1, ns1), Node(_, e2, ns2)) =>
if(e1 <= e2) {
Node(r + 1, e1, Nodes(t2, ns1))
} else {
Node(r + 1, e2, Nodes(t1, ns2))
}
}
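  // Inserts a tree into the heap, repeatedly linking trees of equal rank along the way.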
private def insertNode(t : Node, h : Heap) : Heap = (h match {
case Empty => Nodes(t, Empty)
case Nodes(t2, h2) =>
if(t.rank < t2.rank) {
Nodes(t, h)
} else {
insertNode(link(t, t2), h2)
}
}) ensuring(res => heapContent(res) == nodeContent(t) ++ heapContent(h))
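  // Extracts the tree with the minimal root element together with the remaining heap.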
private def getMin(h : Heap) : (Node, Heap) = {
require(h != Empty)
h match {
case Nodes(t, Empty) => (t, Empty)
case Nodes(t, ts) =>
val (t0, ts0) = getMin(ts)
if(t.elem < t0.elem) {
(t, ts)
} else {
(t0, Nodes(t, ts0))
}
}
} ensuring(_ match {
case (n,h2) => nodeContent(n) ++ heapContent(h2) == heapContent(h)
})
/*~~~~~~~~~~~~~~~~*/
/* Heap interface */
/*~~~~~~~~~~~~~~~~*/
def empty() : Heap = {
Empty
} ensuring(res => heapContent(res) == Set.empty[Int])
def isEmpty(h : Heap) : Boolean = {
(h == Empty)
} ensuring(res => res == (heapContent(h) == Set.empty[Int]))
def insert(e : Int, h : Heap) : Heap = {
insertNode(Node(0, e, Empty), h)
} ensuring(res => heapContent(res) == heapContent(h) ++ Set(e))
def merge(h1 : Heap, h2 : Heap) : Heap = ((h1,h2) match {
case (_, Empty) => h1
case (Empty, _) => h2
case (Nodes(t1, ts1), Nodes(t2, ts2)) =>
if(t1.rank < t2.rank) {
Nodes(t1, merge(ts1, h2))
} else if(t2.rank < t1.rank) {
Nodes(t2, merge(h1, ts2))
} else {
insertNode(link(t1, t2), merge(ts1, ts2))
}
}) ensuring(res => heapContent(res) == heapContent(h1) ++ heapContent(h2))
def findMin(h : Heap) : OptInt = (h match {
case Empty => None
case Nodes(Node(_, e, _), ns) =>
findMin(ns) match {
case None => Some(e)
case Some(e2) => Some(if(e < e2) e else e2)
}
}) ensuring(_ match {
case None => isEmpty(h)
case Some(v) => heapContent(h).contains(v)
})
def deleteMin(h : Heap) : Heap = (h match {
case Empty => Empty
case ts : Nodes =>
val (Node(_, e, ns1), ns2) = getMin(ts)
merge(reverse(ns1), ns2)
}) ensuring(res => heapContent(res).subsetOf(heapContent(h)))
def sanity0() : Boolean = {
val h0 : Heap = Empty
val h1 = insert(42, h0)
val h2 = insert(72, h1)
val h3 = insert(0, h2)
findMin(h0) == None &&
findMin(h1) == Some(42) &&
findMin(h2) == Some(42) &&
findMin(h3) == Some(0)
}.holds
def sanity1() : Boolean = {
val h0 = insert(42, Empty)
val h1 = insert(0, Empty)
val h2 = merge(h0, h1)
findMin(h2) == Some(0)
}.holds
def sanity3() : Boolean = {
val h0 = insert(42, insert(0, insert(12, Empty)))
val h1 = deleteMin(h0)
findMin(h1) == Some(12)
}.holds
}
| epfl-lara/leon | src/test/resources/regression/verification/purescala/valid/Heaps.scala | Scala | gpl-3.0 | 4,215 |
package looty
package model
//////////////////////////////////////////////////////////////
// Copyright (c) 2013 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact [email protected] or [email protected]
// for licensing inquiries
// Created by bjackman @ 12/14/13 1:04 PM
//////////////////////////////////////////////////////////////
object Elements {
def of[A](a: => A) = new Elements[A] {
val physical : A = a
val fire : A = a
val cold : A = a
val lightning: A = a
val chaos : A = a
}
def mutable[A](a: => A): MutableElements[A] = {
val res = new MutableElements[A]
all.foreach(e => res(e) = a)
res
}
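  // Builds an Elements view whose members are recomputed from `f` on every access.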
def calculatedWith[A](f: String => A) = new Elements[A] {
def physical: A = f(Elements.physical)
def fire: A = f(Elements.fire)
def cold: A = f(Elements.cold)
def lightning: A = f(Elements.lightning)
def chaos: A = f(Elements.chaos)
}
val physical = "physical"
val fire = "fire"
val cold = "cold"
val lightning = "lightning"
val chaos = "chaos"
def all = List(physical, fire, cold, lightning, chaos)
}
trait Elements[A] {
def physical: A
def fire: A
def cold: A
def lightning: A
def chaos: A
def all = Elements.all.map(this(_))
def apply(name: String): A = name match {
case Elements.physical => physical
case Elements.fire => fire
case Elements.cold => cold
case Elements.lightning => lightning
case Elements.chaos => chaos
}
}
class MutableElements[A] extends Elements[A] with Accessible[String, A] {
private var _physical : A = _
private var _fire : A = _
private var _cold : A = _
private var _lightning: A = _
private var _chaos : A = _
def physical: A = _physical
def fire: A = _fire
def cold: A = _cold
def lightning: A = _lightning
def chaos: A = _chaos
def physical_=(a: A) = _physical = a
def fire_=(a: A) = _fire = a
def cold_=(a: A) = _cold = a
def lightning_=(a: A) = _lightning = a
def chaos_=(a: A) = _chaos = a
def update(name: String, value: A) = name match {
case Elements.physical => _physical = value
case Elements.fire => _fire = value
case Elements.cold => _cold = value
case Elements.lightning => _lightning = value
case Elements.chaos => _chaos = value
}
}
| benjaminjackman/looty | looty/src/main/scala/looty/model/Elements.scala | Scala | gpl-2.0 | 2,326 |
import scala.tools.nsc._
import scala.tools.nsc.interpreter.shell.ReplReporterImpl
import scala.tools.partest.ReplTest
object Test extends ReplTest {
override def extraSettings = "-deprecation"
def code = """
// basics
3+4
def gcd(x: Int, y: Int): Int = {
if (x == 0) y
else if (y == 0) x
else gcd(y%x, x)
}
val five = gcd(15,35)
var x = 1
x = 2
val three = x+1
type anotherint = Int
val four: anotherint = 4
val bogus: anotherint = "hello"
trait PointlessTrait
val (x,y) = (2,3)
println("hello")
// ticket #1513
val t1513 = Array(null)
// overriding toString problem from #1404
class S(override val toString : String)
val fish = new S("fish")
// Test that arrays pretty print nicely.
val arr = Array("What's", "up", "doc?")
// Test that arrays pretty print nicely, even when we give them type Any
val arrInt : Any = Array(1,2,3)
// Test that nested arrays are pretty-printed correctly
val arrArrInt : Any = Array(Array(1, 2), Array(3, 4))
// implicit conversions
case class Foo(n: Int)
case class Bar(n: Int)
implicit def foo2bar(foo: Foo) = Bar(foo.n)
val bar: Bar = Foo(3)
// importing from a previous result
import bar._
val m = n
// stressing the imports mechanism
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val one = 1
val x1 = 1
val x2 = 1
val x3 = 1
val x4 = 1
val x5 = 1
val x6 = 1
val x7 = 1
val x8 = 1
val x9 = 1
val x10 = 1
val x11 = 1
val x12 = 1
val x13 = 1
val x14 = 1
val x15 = 1
val x16 = 1
val x17 = 1
val x18 = 1
val x19 = 1
val x20 = 1
val two = one + x5
// handling generic wildcard arrays (#2386)
// It's put here because type feedback is an important part of it.
val xs: Array[_] = Array(1, 2)
xs.size
xs.head
xs filter (_ == 2)
xs map (_ => "abc")
xs map (x => x)
xs map (x => (x, x))
// interior syntax errors should *not* go into multi-line input mode.
// both of the following should abort immediately:
def x => y => z
[1,2,3]
/*
/*
multi-line comment
*/
*/
// multi-line string
"""+ "\\"\\"\\""+ """
hello
there
"""+ "\\"\\"\\""+ """
(1 + // give up early by typing two blank lines
// defining and using quoted names should work (ticket #323)
def `match` = 1
val x = `match`
// multiple classes defined on one line
sealed class Exp; class Fact extends Exp; class Term extends Exp
def f(e: Exp) = e match { // non-exhaustive warning here
case _:Fact => 3
}
"""
def appendix() = {
val settings = new Settings
settings.classpath.value = sys.props("java.class.path")
val interp = new interpreter.IMain(settings, new ReplReporterImpl(settings))
interp.interpret("def plusOne(x: Int) = x + 1")
interp.interpret("plusOne(5)")
interp.reset()
interp.interpret("\\"after reset\\"")
interp.interpret("plusOne(5) // should be undefined now")
}
override def main(args: Array[String]): Unit = {
super.main(args)
appendix()
}
}
| lrytz/scala | test/files/jvm/interpreter.scala | Scala | apache-2.0 | 3,034 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.dstream
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.spark.SparkException
import org.apache.spark.streaming.{Duration, Time}
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.UnionRDD
private[streaming]
class UnionDStream[T: ClassTag](parents: Array[DStream[T]])
extends DStream[T](parents.head.ssc) {
require(parents.length > 0, "List of DStreams to union is empty")
require(parents.map(_.ssc).distinct.size == 1, "Some of the DStreams have different contexts")
require(parents.map(_.slideDuration).distinct.size == 1,
"Some of the DStreams have different slide durations")
override def dependencies: List[DStream[_]] = parents.toList
override def slideDuration: Duration = parents.head.slideDuration
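  // For each batch time, collect the RDDs generated by all parents and union them;
  // it is an error if any parent fails to produce an RDD for that time.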
override def compute(validTime: Time): Option[RDD[T]] = {
val rdds = new ArrayBuffer[RDD[T]]()
parents.map(_.getOrCompute(validTime)).foreach {
case Some(rdd) => rdds += rdd
case None => throw new SparkException("Could not generate RDD from a parent for unifying at" +
s" time $validTime")
}
if (rdds.size > 0) {
Some(new UnionRDD(ssc.sc, rdds))
} else {
None
}
}
}
| chenc10/Spark-PAF | streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala | Scala | apache-2.0 | 2,045 |
/*
* Copyright (c) 2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics
package snowplow
package enrich
package common
package enrichments
package registry
// Java
import java.net.URI
// Maven Artifact
import org.apache.maven.artifact.versioning.DefaultArtifactVersion
// Scalaz
import scalaz._
import Scalaz._
import Validation.FlatMap._
// json4s
import org.json4s.JValue
// Iglu
import iglu.client.{SchemaCriterion, SchemaKey}
import iglu.client.validation.ProcessingMessageMethods._
// Snowplow referer-parser
import com.snowplowanalytics.refererparser.scala.{Parser => RefererParser}
import com.snowplowanalytics.refererparser.scala.Referer
// This project
import utils.{ConversionUtils => CU}
import utils.MapTransformer
import utils.MapTransformer._
import utils.ScalazJson4sUtils
/**
* Companion object. Lets us create a
* RefererParserEnrichment from a JValue
*/
object RefererParserEnrichment extends ParseableEnrichment {
val supportedSchema =
SchemaCriterion("com.snowplowanalytics.snowplow", "referer_parser", "jsonschema", 1, 0)
/**
* Creates a RefererParserEnrichment instance from a JValue.
*
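   * An illustrative (hypothetical) shape of the parameters this extracts:
   * {{{
   * {"parameters": {"internalDomains": ["internal.example.com"]}}
   * }}}
   *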
* @param config The referer_parser enrichment JSON
* @param schemaKey The SchemaKey provided for the enrichment
* Must be a supported SchemaKey for this enrichment
* @return a configured RefererParserEnrichment instance
*/
def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[RefererParserEnrichment] =
isParseable(config, schemaKey).flatMap(conf => {
(for {
param <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "internalDomains")
enrich = RefererParserEnrichment(param)
} yield enrich).toValidationNel
})
}
/**
* Config for a referer_parser enrichment
*
* @param domains List of internal domains
*/
case class RefererParserEnrichment(
domains: List[String]
) extends Enrichment {
val version = new DefaultArtifactVersion("0.1.0")
/**
* A Scalaz Lens to update the term within
* a Referer object.
*/
private val termLens: Lens[Referer, MaybeString] =
Lens.lensu((r, newTerm) => r.copy(term = newTerm), _.term)
/**
* Extract details about the referer (sic).
*
* Uses the referer-parser library.
*
* @param uri The referer URI to extract
* referer details from
* @param pageHost The host of the current
* page (used to determine
* if this is an internal
* referer)
* @return a Tuple3 containing referer medium,
* source and term, all Strings
*/
def extractRefererDetails(uri: URI, pageHost: String): Option[Referer] =
for {
r <- RefererParser.parse(uri, pageHost, domains)
t = r.term.flatMap(t => CU.fixTabsNewlines(t))
} yield termLens.set(r, t)
}
| TimothyKlim/snowplow | 3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/RefererParserEnrichment.scala | Scala | apache-2.0 | 3,527 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import org.apache.hadoop.io._
import org.apache.orc.mapred.{OrcList, OrcMap, OrcStruct, OrcTimestamp}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{SpecificInternalRow, UnsafeArrayData}
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
* A deserializer to deserialize ORC structs to Spark rows.
*/
class OrcDeserializer(
requiredSchema: StructType,
requestedColIds: Array[Int]) {
private val resultRow = new SpecificInternalRow(requiredSchema.map(_.dataType))
// `fieldWriters(index)` is
// - null if the respective source column is missing, since the output value
// is always null in this case
// - a function that updates target column `index` otherwise.
private val fieldWriters: Array[WritableComparable[_] => Unit] = {
requiredSchema.zipWithIndex
.map { case (f, index) =>
if (requestedColIds(index) == -1) {
null
} else {
val writer = newWriter(f.dataType, new RowUpdater(resultRow))
(value: WritableComparable[_]) => writer(index, value)
}
}.toArray
}
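  // Deserializes one ORC struct into `resultRow`. The same row instance is reused across
  // calls, so callers must copy the returned InternalRow if they need to retain it.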
def deserialize(orcStruct: OrcStruct): InternalRow = {
var targetColumnIndex = 0
while (targetColumnIndex < fieldWriters.length) {
if (fieldWriters(targetColumnIndex) != null) {
val value = orcStruct.getFieldValue(requestedColIds(targetColumnIndex))
if (value == null) {
resultRow.setNullAt(targetColumnIndex)
} else {
fieldWriters(targetColumnIndex)(value)
}
}
targetColumnIndex += 1
}
resultRow
}
/**
* Creates a writer to write ORC values to Catalyst data structure at the given ordinal.
*/
private def newWriter(
dataType: DataType, updater: CatalystDataUpdater): (Int, WritableComparable[_]) => Unit =
dataType match {
case NullType => (ordinal, _) =>
updater.setNullAt(ordinal)
case BooleanType => (ordinal, value) =>
updater.setBoolean(ordinal, value.asInstanceOf[BooleanWritable].get)
case ByteType => (ordinal, value) =>
updater.setByte(ordinal, value.asInstanceOf[ByteWritable].get)
case ShortType => (ordinal, value) =>
updater.setShort(ordinal, value.asInstanceOf[ShortWritable].get)
case IntegerType => (ordinal, value) =>
updater.setInt(ordinal, value.asInstanceOf[IntWritable].get)
case LongType => (ordinal, value) =>
updater.setLong(ordinal, value.asInstanceOf[LongWritable].get)
case FloatType => (ordinal, value) =>
updater.setFloat(ordinal, value.asInstanceOf[FloatWritable].get)
case DoubleType => (ordinal, value) =>
updater.setDouble(ordinal, value.asInstanceOf[DoubleWritable].get)
case StringType => (ordinal, value) =>
updater.set(ordinal, UTF8String.fromBytes(value.asInstanceOf[Text].copyBytes))
case BinaryType => (ordinal, value) =>
val binary = value.asInstanceOf[BytesWritable]
val bytes = new Array[Byte](binary.getLength)
System.arraycopy(binary.getBytes, 0, bytes, 0, binary.getLength)
updater.set(ordinal, bytes)
case DateType => (ordinal, value) =>
updater.setInt(ordinal, OrcShimUtils.getGregorianDays(value))
case TimestampType => (ordinal, value) =>
updater.setLong(ordinal, DateTimeUtils.fromJavaTimestamp(value.asInstanceOf[OrcTimestamp]))
case DecimalType.Fixed(precision, scale) => (ordinal, value) =>
val v = OrcShimUtils.getDecimal(value)
v.changePrecision(precision, scale)
updater.set(ordinal, v)
case st: StructType => (ordinal, value) =>
val result = new SpecificInternalRow(st)
val fieldUpdater = new RowUpdater(result)
val fieldConverters = st.map(_.dataType).map { dt =>
newWriter(dt, fieldUpdater)
}.toArray
val orcStruct = value.asInstanceOf[OrcStruct]
var i = 0
while (i < st.length) {
val value = orcStruct.getFieldValue(i)
if (value == null) {
result.setNullAt(i)
} else {
fieldConverters(i)(i, value)
}
i += 1
}
updater.set(ordinal, result)
case ArrayType(elementType, _) => (ordinal, value) =>
val orcArray = value.asInstanceOf[OrcList[WritableComparable[_]]]
val length = orcArray.size()
val result = createArrayData(elementType, length)
val elementUpdater = new ArrayDataUpdater(result)
val elementConverter = newWriter(elementType, elementUpdater)
var i = 0
while (i < length) {
val value = orcArray.get(i)
if (value == null) {
result.setNullAt(i)
} else {
elementConverter(i, value)
}
i += 1
}
updater.set(ordinal, result)
case MapType(keyType, valueType, _) => (ordinal, value) =>
val orcMap = value.asInstanceOf[OrcMap[WritableComparable[_], WritableComparable[_]]]
val length = orcMap.size()
val keyArray = createArrayData(keyType, length)
val keyUpdater = new ArrayDataUpdater(keyArray)
val keyConverter = newWriter(keyType, keyUpdater)
val valueArray = createArrayData(valueType, length)
val valueUpdater = new ArrayDataUpdater(valueArray)
val valueConverter = newWriter(valueType, valueUpdater)
var i = 0
val it = orcMap.entrySet().iterator()
while (it.hasNext) {
val entry = it.next()
keyConverter(i, entry.getKey)
val value = entry.getValue
if (value == null) {
valueArray.setNullAt(i)
} else {
valueConverter(i, value)
}
i += 1
}
        // The ORC map will never have null or duplicated map keys, so it's safe to create an
        // ArrayBasedMapData directly here.
updater.set(ordinal, new ArrayBasedMapData(keyArray, valueArray))
case udt: UserDefinedType[_] => newWriter(udt.sqlType, updater)
case _ =>
throw QueryExecutionErrors.dataTypeUnsupportedYetError(dataType)
}
private def createArrayData(elementType: DataType, length: Int): ArrayData = elementType match {
case BooleanType => UnsafeArrayData.fromPrimitiveArray(new Array[Boolean](length))
case ByteType => UnsafeArrayData.fromPrimitiveArray(new Array[Byte](length))
case ShortType => UnsafeArrayData.fromPrimitiveArray(new Array[Short](length))
case IntegerType => UnsafeArrayData.fromPrimitiveArray(new Array[Int](length))
case LongType => UnsafeArrayData.fromPrimitiveArray(new Array[Long](length))
case FloatType => UnsafeArrayData.fromPrimitiveArray(new Array[Float](length))
case DoubleType => UnsafeArrayData.fromPrimitiveArray(new Array[Double](length))
case _ => new GenericArrayData(new Array[Any](length))
}
/**
* A base interface for updating values inside catalyst data structure like `InternalRow` and
* `ArrayData`.
*/
sealed trait CatalystDataUpdater {
def set(ordinal: Int, value: Any): Unit
def setNullAt(ordinal: Int): Unit = set(ordinal, null)
def setBoolean(ordinal: Int, value: Boolean): Unit = set(ordinal, value)
def setByte(ordinal: Int, value: Byte): Unit = set(ordinal, value)
def setShort(ordinal: Int, value: Short): Unit = set(ordinal, value)
def setInt(ordinal: Int, value: Int): Unit = set(ordinal, value)
def setLong(ordinal: Int, value: Long): Unit = set(ordinal, value)
def setDouble(ordinal: Int, value: Double): Unit = set(ordinal, value)
def setFloat(ordinal: Int, value: Float): Unit = set(ordinal, value)
}
final class RowUpdater(row: InternalRow) extends CatalystDataUpdater {
override def setNullAt(ordinal: Int): Unit = row.setNullAt(ordinal)
override def set(ordinal: Int, value: Any): Unit = row.update(ordinal, value)
override def setBoolean(ordinal: Int, value: Boolean): Unit = row.setBoolean(ordinal, value)
override def setByte(ordinal: Int, value: Byte): Unit = row.setByte(ordinal, value)
override def setShort(ordinal: Int, value: Short): Unit = row.setShort(ordinal, value)
override def setInt(ordinal: Int, value: Int): Unit = row.setInt(ordinal, value)
override def setLong(ordinal: Int, value: Long): Unit = row.setLong(ordinal, value)
override def setDouble(ordinal: Int, value: Double): Unit = row.setDouble(ordinal, value)
override def setFloat(ordinal: Int, value: Float): Unit = row.setFloat(ordinal, value)
}
final class ArrayDataUpdater(array: ArrayData) extends CatalystDataUpdater {
override def setNullAt(ordinal: Int): Unit = array.setNullAt(ordinal)
override def set(ordinal: Int, value: Any): Unit = array.update(ordinal, value)
override def setBoolean(ordinal: Int, value: Boolean): Unit = array.setBoolean(ordinal, value)
override def setByte(ordinal: Int, value: Byte): Unit = array.setByte(ordinal, value)
override def setShort(ordinal: Int, value: Short): Unit = array.setShort(ordinal, value)
override def setInt(ordinal: Int, value: Int): Unit = array.setInt(ordinal, value)
override def setLong(ordinal: Int, value: Long): Unit = array.setLong(ordinal, value)
override def setDouble(ordinal: Int, value: Double): Unit = array.setDouble(ordinal, value)
override def setFloat(ordinal: Int, value: Float): Unit = array.setFloat(ordinal, value)
}
}
| wangmiao1981/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcDeserializer.scala | Scala | apache-2.0 | 10,439 |
// Generated by <a href="http://scalaxb.org/">scalaxb</a>.
package eveapi.xml.account.char.Skills
import scala.concurrent.Future
/**
usage:
val obj = scalaxb.fromXML[eveapi.xml.account.char.Skills.Foo](node)
val document = scalaxb.toXML[eveapi.xml.account.char.Skills.Foo](obj, "foo", eveapi.xml.account.char.Skills.defaultScope)
**/
object `package` extends XMLProtocol { }
trait XMLProtocol extends scalaxb.XMLStandardTypes {
implicit lazy val executionContext = scala.concurrent.ExecutionContext.Implicits.global
val defaultScope = scalaxb.toScope(Some("xs") -> "http://www.w3.org/2001/XMLSchema",
Some("xsi") -> "http://www.w3.org/2001/XMLSchema-instance")
implicit lazy val SkillsEveapiFormat: scalaxb.XMLFormat[eveapi.xml.account.char.Skills.Eveapi] = new DefaultSkillsEveapiFormat {}
implicit lazy val SkillsResultFormat: scalaxb.XMLFormat[eveapi.xml.account.char.Skills.Result] = new DefaultSkillsResultFormat {}
implicit lazy val SkillsRowsetFormat: scalaxb.XMLFormat[eveapi.xml.account.char.Skills.Rowset] = new DefaultSkillsRowsetFormat {}
implicit lazy val SkillsRowFormat: scalaxb.XMLFormat[eveapi.xml.account.char.Skills.Row] = new DefaultSkillsRowFormat {}
trait DefaultSkillsEveapiFormat extends scalaxb.ElemNameParser[eveapi.xml.account.char.Skills.Eveapi] {
val targetNamespace: Option[String] = None
def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.char.Skills.Eveapi] =
phrase((scalaxb.ElemName(None, "currentTime")) ~
(scalaxb.ElemName(None, "result")) ~
(scalaxb.ElemName(None, "cachedUntil")) ^^
{ case p1 ~ p2 ~ p3 =>
eveapi.xml.account.char.Skills.Eveapi(scalaxb.fromXML[String](p1, scalaxb.ElemName(node) :: stack),
scalaxb.fromXML[eveapi.xml.account.char.Skills.Result](p2, scalaxb.ElemName(node) :: stack),
scalaxb.fromXML[String](p3, scalaxb.ElemName(node) :: stack),
scala.collection.immutable.ListMap(List(
(node \\ "@version").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@version" -> _ }
).flatten[(String, scalaxb.DataRecord[Any])]: _*)) })
override def writesAttribute(__obj: eveapi.xml.account.char.Skills.Eveapi, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {
var attr: scala.xml.MetaData = scala.xml.Null
__obj.attributes.toList map {
case ("@version", _) => attr = scala.xml.Attribute(null, "version", __obj.version.toString, attr)
case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr)
}
attr
}
def writesChildNodes(__obj: eveapi.xml.account.char.Skills.Eveapi, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] =
Seq.concat(scalaxb.toXML[String](__obj.currentTime, None, Some("currentTime"), __scope, false),
scalaxb.toXML[eveapi.xml.account.char.Skills.Result](__obj.result, None, Some("result"), __scope, false),
scalaxb.toXML[String](__obj.cachedUntil, None, Some("cachedUntil"), __scope, false))
}
trait DefaultSkillsResultFormat extends scalaxb.ElemNameParser[eveapi.xml.account.char.Skills.Result] {
val targetNamespace: Option[String] = None
def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.char.Skills.Result] =
phrase((scalaxb.ElemName(None, "freeSkillPoints")) ~
(scalaxb.ElemName(None, "rowset")) ^^
{ case p1 ~ p2 =>
eveapi.xml.account.char.Skills.Result(scalaxb.fromXML[BigInt](p1, scalaxb.ElemName(node) :: stack),
scalaxb.fromXML[eveapi.xml.account.char.Skills.Rowset](p2, scalaxb.ElemName(node) :: stack)) })
def writesChildNodes(__obj: eveapi.xml.account.char.Skills.Result, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] =
Seq.concat(scalaxb.toXML[BigInt](__obj.freeSkillPoints, None, Some("freeSkillPoints"), __scope, false),
scalaxb.toXML[eveapi.xml.account.char.Skills.Rowset](__obj.rowset, None, Some("rowset"), __scope, false))
}
trait DefaultSkillsRowsetFormat extends scalaxb.ElemNameParser[eveapi.xml.account.char.Skills.Rowset] {
val targetNamespace: Option[String] = None
def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.char.Skills.Rowset] =
phrase(safeRep(scalaxb.ElemName(None, "row")) ^^
{ case p1 =>
eveapi.xml.account.char.Skills.Rowset(p1.toSeq map { scalaxb.fromXML[eveapi.xml.account.char.Skills.Row](_, scalaxb.ElemName(node) :: stack) },
scala.collection.immutable.ListMap(List(
(node \\ "@columns").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@columns" -> _ },
(node \\ "@key").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@key" -> _ },
(node \\ "@name").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@name" -> _ }
).flatten[(String, scalaxb.DataRecord[Any])]: _*)) })
override def writesAttribute(__obj: eveapi.xml.account.char.Skills.Rowset, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {
var attr: scala.xml.MetaData = scala.xml.Null
__obj.attributes.toList map {
case ("@columns", _) => attr = scala.xml.Attribute(null, "columns", __obj.columns.toString, attr)
case ("@key", _) => attr = scala.xml.Attribute(null, "key", __obj.key.toString, attr)
case ("@name", _) => attr = scala.xml.Attribute(null, "name", __obj.name.toString, attr)
case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr)
}
attr
}
def writesChildNodes(__obj: eveapi.xml.account.char.Skills.Rowset, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] =
(__obj.row flatMap { scalaxb.toXML[eveapi.xml.account.char.Skills.Row](_, None, Some("row"), __scope, false) })
}
trait DefaultSkillsRowFormat extends scalaxb.XMLFormat[eveapi.xml.account.char.Skills.Row] with scalaxb.CanWriteChildNodes[eveapi.xml.account.char.Skills.Row] {
val targetNamespace: Option[String] = None
import scalaxb.ElemName._
def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, eveapi.xml.account.char.Skills.Row] = seq match {
case node: scala.xml.Node => Right(eveapi.xml.account.char.Skills.Row(scala.collection.immutable.ListMap(List(
(node \\ "@level").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@level" -> _ },
(node \\ "@published").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@published" -> _ },
(node \\ "@skillpoints").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@skillpoints" -> _ },
(node \\ "@typeID").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@typeID" -> _ },
(node \\ "@typeName").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@typeName" -> _ }
).flatten[(String, scalaxb.DataRecord[Any])]: _*)))
case _ => Left("reads failed: seq must be scala.xml.Node")
}
override def writesAttribute(__obj: eveapi.xml.account.char.Skills.Row, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {
var attr: scala.xml.MetaData = scala.xml.Null
__obj.attributes.toList map {
case ("@level", _) => attr = scala.xml.Attribute(null, "level", __obj.level.toString, attr)
case ("@published", _) => attr = scala.xml.Attribute(null, "published", __obj.published.toString, attr)
case ("@skillpoints", _) => attr = scala.xml.Attribute(null, "skillpoints", __obj.skillpoints.toString, attr)
case ("@typeID", _) => attr = scala.xml.Attribute(null, "typeID", __obj.typeID.toString, attr)
case ("@typeName", _) => attr = scala.xml.Attribute(null, "typeName", __obj.typeName.toString, attr)
case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr)
}
attr
}
def writesChildNodes(__obj: eveapi.xml.account.char.Skills.Row, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] =
Nil
}
}
| scala-eveapi/eveapi | xml/src/main/scala/eveapi/xml/char/Skills/xmlprotocol.scala | Scala | mit | 8,732 |
package fpinscala.laziness
import org.scalatest.{FunSpec, MustMatchers}
class StreamTest extends FunSpec with MustMatchers {
describe("toList") {
it("returns non-empty list") {
Stream(1, 2, 3, 4).toList mustBe List(1, 2, 3, 4)
}
it("returns empty list") {
Stream().toList mustBe Nil
}
}
describe("take") {
it("returns stream of the first n elements") {
Stream(1, 2, 3, 4).take(0).toList mustBe List()
Stream(1, 2, 3, 4).take(1).toList mustBe List(1)
Stream(1, 2, 3, 4).take(2).toList mustBe List(1, 2)
Stream(1, 2, 3, 4).take(3).toList mustBe List(1, 2, 3)
Stream(1, 2, 3, 4).take(4).toList mustBe List(1, 2, 3, 4)
}
it("returns all stream if n > Stream.length") {
Stream(1, 2).take(4).toList mustBe List(1, 2)
}
it("returns empty stream") {
Stream().take(4) mustBe Empty
}
}
describe("takeViaUnfold") {
it("returns stream of the first n elements") {
Stream(1, 2, 3, 4).takeViaUnfold(0).toList mustBe List()
Stream(1, 2, 3, 4).takeViaUnfold(1).toList mustBe List(1)
Stream(1, 2, 3, 4).takeViaUnfold(2).toList mustBe List(1, 2)
Stream(1, 2, 3, 4).takeViaUnfold(3).toList mustBe List(1, 2, 3)
Stream(1, 2, 3, 4).takeViaUnfold(4).toList mustBe List(1, 2, 3, 4)
}
it("returns all stream if n > Stream.length") {
Stream(1, 2).takeViaUnfold(4).toList mustBe List(1, 2)
}
it("returns empty stream") {
Stream().takeViaUnfold(4) mustBe Empty
}
}
describe("drop") {
it("returns stream without the first n elements") {
Stream(1, 2, 3, 4).drop(2).toList mustBe List(3, 4)
}
it("returns all stream if n > Stream.length") {
Stream(1, 2) drop 4 mustBe Empty
}
it("returns empty stream") {
Stream() drop 4 mustBe Empty
}
}
describe("takeWhile") {
it("returns empty stream if predicate is always false") {
Stream(1, 3, 5, 7).takeWhile(_ < 0).toList mustBe Nil
}
it("returns prefix of the stream while predicate is true") {
Stream(1, 3, 5, 7).takeWhile(_ < 6).toList mustBe List(1, 3, 5)
}
it("returns whole stream if predicate is always true") {
Stream(1, 3, 5, 7).takeWhile(_ < 9).toList mustBe List(1, 3, 5, 7)
}
}
describe("takeWhileViaUnfold") {
it("returns empty stream if predicate is always false") {
Stream(1, 3, 5, 7).takeWhileViaUnfold(_ < 0).toList mustBe Nil
}
it("returns prefix of the stream while predicate is true") {
Stream(1, 3, 5, 7).takeWhileViaUnfold(_ < 6).toList mustBe List(1, 3, 5)
}
it("returns whole stream if predicate is always true") {
Stream(1, 3, 5, 7).takeWhileViaUnfold(_ < 9).toList mustBe List(1, 3, 5, 7)
}
}
describe("takeWhileViaFoldRight") {
it("returns empty stream if predicate is always false") {
Stream(1, 3, 5, 7).takeWhileViaFoldRight(_ < 0).toList mustBe Nil
}
it("returns prefix of the stream while predicate is true") {
Stream(1, 3, 5, 7).takeWhileViaFoldRight(_ < 6).toList mustBe List(1, 3, 5)
}
it("returns whole stream if predicate is always true") {
Stream(1, 3, 5, 7).takeWhileViaFoldRight(_ < 9).toList mustBe List(1, 3, 5, 7)
}
}
describe("headOption") {
it("some head") {
Stream(1, 2, 3).headOption mustBe Some(1)
}
it("none head") {
Empty.headOption mustBe None
}
}
describe("map") {
it("applies function to every element of a stream") {
Stream(1, 2, 3).map(_.toString).toList mustBe List("1", "2", "3")
}
}
describe("mapViaUnfold") {
it("applies function to every element of a stream") {
Stream(1, 2, 3).mapViaUnfold(_.toString).toList mustBe List("1", "2", "3")
}
}
describe("flatMap") {
it("applies function to every element of a stream and flattens results") {
val f: Int => Stream[Int] = e => if (e % 2 == 0) Stream(e, e * 10) else Empty
Stream(1, 2, 3, 4).flatMap(f).toList mustBe List(2, 20, 4, 40)
}
}
describe("filter") {
it("removes element that doesn't satisfy a predicate") {
Stream(1, 2, 3, 4).filter(_ % 2 == 0).toList mustBe List(2, 4)
}
}
describe("append") {
it("appends a stream to the end of the empty stream") {
Empty.append(Stream(1, 2)).toList mustBe List(1, 2)
}
it("appends one stream to the end of another") {
Stream(1, 2).append(Stream(3, 4)).toList mustBe List(1, 2, 3, 4)
}
it("appends empty stream to the end of another") {
Stream(1, 2).append(Empty).toList mustBe List(1, 2)
}
}
describe("constant") {
it("returns infinite stream of constants") {
Stream.constant(5).map(_ - 4).take(5).toList mustBe List(1, 1, 1, 1, 1)
}
}
describe("from") {
it("returns growing row of integers") {
Stream.from(3).take(5).toList mustBe List(3, 4, 5, 6, 7)
}
}
describe("fibs") {
it("returns fibonacci sequence") {
Stream.fibs.take(7).toList mustBe List(0, 1, 1, 2, 3, 5, 8)
}
}
describe("constantViaUnfold") {
it("returns infinite stream of constants") {
Stream.constantViaUnfold(5).map(_ - 4).take(5).toList mustBe List(1, 1, 1, 1, 1)
}
}
describe("fromViaUnfold") {
it("returns growing row of integers") {
Stream.fromViaUnfold(3).take(5).toList mustBe List(3, 4, 5, 6, 7)
}
}
describe("fibsViaUnfold") {
it("returns fibonacci sequence") {
Stream.fibsViaUnfold.take(7).toList mustBe List(0, 1, 1, 2, 3, 5, 8)
}
}
describe("zipWithViaUnfold") {
it("zips streams of equal length") {
Stream(1, 2, 3).zipWithViaUnfold(Stream("a", "b", "c"))(_ + "-" + _).toList mustBe List("1-a", "2-b", "3-c")
}
it("zips with a longer stream") {
Stream(1, 2).zipWithViaUnfold(Stream("a", "b", "c"))(_ + "-" + _).toList mustBe List("1-a", "2-b")
}
it("zips with a shorter stream") {
Stream(1, 2, 3).zipWithViaUnfold(Stream("a", "b"))(_ + "-" + _).toList mustBe List("1-a", "2-b")
}
}
describe("zipAll") {
it("zips streams of equal length") {
Stream(1, 2).zipAll(Stream("a", "b")).toList mustBe List((Some(1), Some("a")), (Some(2), Some("b")))
}
it("zips with a longer stream") {
Stream(1).zipAll(Stream("a", "b")).toList mustBe List((Some(1), Some("a")), (None, Some("b")))
}
it("zips with a shorter stream") {
Stream(1, 2).zipAll(Stream("a")).toList mustBe List((Some(1), Some("a")), (Some(2), None))
}
}
describe("startsWith") {
it("returns true if stream starts with another non-empty stream") {
Stream(1, 2, 3) startsWith Stream(1, 2) mustBe true
}
it("returns true if stream starts with another empty stream") {
Stream(1, 2, 3) startsWith Empty mustBe true
}
it("returns true if empty stream starts with another empty stream") {
Empty startsWith Empty mustBe true
}
it("returns false for an empty stream starts with another non-empty stream") {
Empty startsWith Stream(1) mustBe false
}
it("returns false if stream starts with another non-empty stream") {
Stream(1, 3, 4) startsWith Stream(1, 2) mustBe false
}
}
describe("tails") {
it("returns stream of tails") {
Stream(1, 2, 3, 4).tails.map(_.toList).toList mustBe List(
List(1, 2, 3, 4),
List(2, 3, 4),
List(3, 4),
List(4),
List())
}
it("returns empty stream for empty stream") {
Empty.tails.toList mustBe List(Empty)
}
}
describe("scanRight") {
it("returns list of intermediate results") {
Stream(1, 2, 3).scanRight(0)(_ + _).toList mustBe List(6, 5, 3, 0)
}
}
}
| Unisay/fpinscala | exercises/src/test/scala/fpinscala/laziness/StreamTest.scala | Scala | mit | 7,734 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.inference
import java.io._
import java.util.{List => JList}
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Table
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
class FloatModel(var model: AbstractModule[Activity, Activity, Float],
var metaModel: AbstractModule[Activity, Activity, Float],
var isOriginal: Boolean)
extends AbstractModel with InferenceSupportive with Serializable {
override def predictNg(input: JList[JTensor]): JList[JTensor] = {
throw new Exception("Not implemented")
}
override def predict(inputs: JList[JList[JTensor]]): JList[JList[JTensor]] = {
val batchSize = inputs.size()
require(batchSize > 0, "inputs size should > 0")
val inputActivity = transferListOfActivityToActivityOfBatch(inputs, batchSize)
val result: Activity = predict(inputActivity)
val outputs = result.isTensor match {
case true =>
val outputTensor = result.toTensor[Float]
transferBatchTensorToJListOfJListOfJTensor(outputTensor, batchSize)
case false =>
val outputTable: Table = result.toTable
transferBatchTableToJListOfJListOfJTensor(outputTable, batchSize)
}
outputs
}
override def predict(inputActivity: Activity): Activity = {
model.forward(inputActivity)
}
override def copy(num: Int): Array[AbstractModel] = {
doCopy(metaModel, model.getWeightsBias(), num)
}
override def release(): Unit = {
isReleased match {
case true =>
case false =>
model.release()
model = null
metaModel = null
}
}
override def isReleased(): Boolean = {
model == null
}
override def toString: String = s"FloatInferenceModel($model)"
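  // Clones the meta model `num` times and re-applies the captured weights and bias,
  // yielding independent model instances (e.g. for serving predictions in parallel).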
def doCopy(metaModel: AbstractModule[Activity, Activity, Float],
weightBias: Array[Tensor[Float]],
num: Int):
Array[AbstractModel] = {
require(metaModel != null, "metaModel can NOT be null")
List.range(0, num).map(_ => {
val clonedModel = metaModel.cloneModule()
val clonedModelWithWeightsBias = makeUpModel(clonedModel, weightBias)
new FloatModel(clonedModelWithWeightsBias, metaModel, false)
}).toArray
}
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/pipeline/inference/FloatModel.scala | Scala | apache-2.0 | 3,083 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer.lucene
import org.apache.lucene.document.{ Document, StringField }
import org.apache.lucene.document.Field.Store
import org.apache.lucene.index.Term
import org.apache.lucene.search.{ BooleanQuery, Query, TermQuery }
import org.apache.lucene.search.BooleanClause.Occur._
import shapeless.Typeable
// in hindsight, this would have been more cleanly designed as TypeClass
abstract class Serializer[T](tpe: Typeable[T])
extends DocumentProvider[T] with DocumentRecovery[T] with QueryProvider[T] {
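  // Each document is tagged with the Typeable description of T, so the queries built
  // here only ever match entities of this concrete type.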
private val TypeField = new StringField("TYPE", tpe.describe, Store.YES)
private val TypeTerm = new TermQuery(new Term(TypeField.name, TypeField.stringValue))
def id(t: T): String
final def toDocument(t: T): Document = {
val doc = new Document
doc.add(TypeField)
doc.add(new StringField("ID", id(t), Store.NO))
addFields(doc, t)
doc
}
def addFields(doc: Document, t: T): Unit
final def createQuery(e: T): Query = {
new BooleanQuery.Builder().
add(TypeTerm, MUST).
add(new TermQuery(new Term("ID", id(e))), MUST).
build()
}
}
abstract class EntityS[T <: Entity](tpe: Typeable[T]) extends Serializer(tpe) {
def id(t: T) = t.id
}
| VlachJosef/ensime-server | core/src/main/scala/org/ensime/indexer/lucene/Serializer.scala | Scala | gpl-3.0 | 1,346 |
package scryetek
import org.scalacheck.Arbitrary._
import org.scalacheck.Arbitrary
import org.scalacheck.Gen
/**
* Created by Matt on 01/11/2015.
*/
package object vecmath {
implicit val arbitraryVec3 = Arbitrary(for {
x <- Gen.choose(-100f, 100f)
y <- Gen.choose(-100f, 100f)
z <- Gen.choose(-100f, 100f)
vec = Vec3(x,y,z) if !vec.magnitude.isInfinite && !vec.magnitude.isNaN
} yield vec)
implicit val arbitraryVec2 = Arbitrary(for {
x <- Gen.choose(-100f, 100f)
y <- Gen.choose(-100f, 100f)
vec = Vec2(x,y) if !vec.magnitude.isInfinite && !vec.magnitude.isNaN
} yield vec)
implicit val arbitraryAngleAxis = Arbitrary(for {
v <- arbitrary[Vec3]
angle <- Gen.choose(-math.Pi*2, math.Pi*2)
} yield AngleAxis(angle.toFloat, v.normalized))
implicit val arbitraryVec4 = Arbitrary(for {
x <- Gen.choose(-100f, 100f)
y <- Gen.choose(-100f, 100f)
z <- Gen.choose(-100f, 100f)
w <- Gen.choose(-100f, 100f)
vec = Vec4(x,y,z,w) if !vec.magnitude.isInfinite && !vec.magnitude.isNaN
} yield vec)
implicit val arbitraryQuat = Arbitrary(for {
axis <- arbitrary[Vec3]
angle <- Gen.choose(-math.Pi*2, math.Pi*2)
} yield Quat.fromAngleAxis(angle.toFloat, axis.normalized))
implicit val arbitraryMat4 = Arbitrary(for {
aa <- arbitrary[AngleAxis]
scale <- arbitrary[Vec3] if scale.x != 0 && scale.y != 0 && scale.z != 0
translate <- arbitrary[Vec3]
} yield Mat4.rotate(aa.angle, aa.axis) * Mat4.translate(translate) * Mat4.scale(scale))
implicit val arbitraryMat3 = Arbitrary(for {
aa <- arbitrary[AngleAxis]
scale <- arbitrary[Vec3] if scale.x != 0 && scale.y != 0 && scale.z != 0
} yield Mat3.rotate(aa.angle, aa.axis) * Mat3.scale(scale))
implicit val arbitraryMat2 = Arbitrary(for {
angle <- Gen.choose(-math.Pi*2, math.Pi*2)
scale <- arbitrary[Vec2] if scale.x != 0 && scale.y != 0
} yield Mat2.rotate(angle.toFloat) * Mat2.scale(scale))
implicit val arbitraryMat2d = Arbitrary(for {
m <- arbitrary[Mat3]
} yield m.toMat2d)
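  // The `~=` helpers below give approximate equality within a fixed epsilon (0.001f),
  // used by the property tests to compare floating-point results.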
implicit class floatPimp(val f: Float) extends AnyVal {
def approximatelyEqualTo(x: Float, epsilon: Float) =
math.abs(f-x) < epsilon
def ~=(x: Float) =
this.approximatelyEqualTo(x, 0.001f)
}
implicit class vec2Pimp(val v: Vec2) {
@inline
def approximatelyEqualTo(v2: Vec2, epsilon: Float): Boolean =
math.abs((v - v2).magnitude) < epsilon
def ~=(v: Vec2): Boolean =
this.approximatelyEqualTo(v, 0.001f)
}
implicit class vec3Pimp(val v: Vec3) {
@inline
def approximatelyEqualTo(v2: Vec3, epsilon: Float): Boolean =
math.abs((v - v2).magnitude) < epsilon
def ~=(v: Vec3): Boolean =
this.approximatelyEqualTo(v, 0.001f)
}
implicit class vec4Pimp(val v: Vec4) {
@inline
def approximatelyEqualTo(v2: Vec4, epsilon: Float): Boolean =
math.abs((v - v2).magnitude) < epsilon
def ~=(v: Vec4): Boolean =
this.approximatelyEqualTo(v, 0.001f)
}
}
| mseddon/vecmath | shared/src/test/scala/scryetek/vecmath/package.scala | Scala | bsd-3-clause | 3,002 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.responsiblepeople
import jto.validation.{Invalid, Path, Valid, ValidationError}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json._
class PositionInBusinessSpec extends PlaySpec with MockitoSugar {
"PositionInBusiness" must {
"validate position and 'other' text from tuple" in {
PositionWithinBusiness.fullySpecifiedRule.validate((Set("01"), None)) mustBe Valid(Set(BeneficialOwner))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("02"), None)) mustBe Valid(Set(Director))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("03"), None)) mustBe Valid(Set(InternalAccountant))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("04"), None)) mustBe Valid(Set(NominatedOfficer))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("05"), None)) mustBe Valid(Set(Partner))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("06"), None)) mustBe Valid(Set(SoleProprietor))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("07"), None)) mustBe Valid(Set(DesignatedMember))
PositionWithinBusiness.fullySpecifiedRule.validate((Set("other"), Some("some other role"))) mustBe Valid(Set(Other("some other role")))
}
"successfully validate form" in {
val form = Map(
"positions[0]" -> Seq("01"),
"positions[1]" -> Seq("other"),
"otherPosition" -> Seq("some other position"))
PositionWithinBusiness.positionsRule.validate(form) mustBe
Valid( Set(BeneficialOwner, Other("some other position")))
}
"fail to validate when 'other' is selected but no 'other' value is given" in {
val form = Map(
"positions[0]" -> Seq("01"),
"positions[1]" -> Seq("other"))
PositionWithinBusiness.positionsRule.validate(form) mustBe
Invalid(Seq((Path \\ "otherPosition") -> Seq(ValidationError("responsiblepeople.position_within_business.other_position.othermissing"))))
}
"fail to validate when 'other' is selected but white space 'other' value is given" in {
val form = Map(
"positions[0]" -> Seq("01"),
"positions[1]" -> Seq("other"),
"otherPosition" -> Seq(" "))
PositionWithinBusiness.positionsRule.validate(form) mustBe
Invalid(Seq((Path \\ "otherPosition") -> Seq(ValidationError("responsiblepeople.position_within_business.other_position.othermissing"))))
}
"fail to validate when an invalid valid was given" in {
val form = Map(
"positions[0]" -> Seq("10")
)
intercept[Exception] {
PositionWithinBusiness.positionsRule.validate(form)
}
}
"fail to validate an empty position list" in {
PositionWithinBusiness.atLeastOneRule.validate(Set.empty[String]) must
be(Invalid(Seq(
Path -> Seq(ValidationError("error.required.positionWithinBusiness"))
)))
}
"write correct position id" in {
PositionWithinBusiness.formWrite.writes(BeneficialOwner) must be("01")
PositionWithinBusiness.formWrite.writes(Director) must be("02")
PositionWithinBusiness.formWrite.writes(InternalAccountant) must be("03")
PositionWithinBusiness.formWrite.writes(NominatedOfficer) must be("04")
PositionWithinBusiness.formWrite.writes(Partner) must be("05")
PositionWithinBusiness.formWrite.writes(SoleProprietor) must be("06")
PositionWithinBusiness.formWrite.writes(DesignatedMember) must be("07")
PositionWithinBusiness.formWrite.writes(Other("")) must be("other")
}
"successfully write form from a set of PositionWithinBusiness" in {
val model = Set(InternalAccountant, Other("some other position")).asInstanceOf[Set[PositionWithinBusiness]]
PositionWithinBusiness.formWrites.writes(model) mustBe Map(
"positions[]" -> Seq("03", "other"),
"otherPosition" -> Seq("some other position"))
}
"JSON validation" must {
"read the correct value" when {
"given a BeneficialOwner value" in {
Json.fromJson[PositionWithinBusiness](JsString("01")) must
be(JsSuccess(BeneficialOwner))
}
"given a Director value" in {
Json.fromJson[PositionWithinBusiness](JsString("02")) must
be(JsSuccess(Director))
}
"given a InternalAccountant value" in {
Json.fromJson[PositionWithinBusiness](JsString("03")) must
be(JsSuccess(InternalAccountant))
}
"given a NominatedOfficer value" in {
Json.fromJson[PositionWithinBusiness](JsString("04")) must
be(JsSuccess(NominatedOfficer))
}
"given a Partner value" in {
Json.fromJson[PositionWithinBusiness](JsString("05")) must
be(JsSuccess(Partner))
}
"given a SoleProprietor value" in {
Json.fromJson[PositionWithinBusiness](JsString("06")) must
be(JsSuccess(SoleProprietor))
}
"given a DesignatedMember value" in {
Json.fromJson[PositionWithinBusiness](JsString("07")) must
be(JsSuccess(DesignatedMember))
}
"given an OtherSelection value" in {
Json.fromJson[PositionWithinBusiness](Json.obj("other" -> "some other role")) mustBe JsSuccess(Other("some other role"))
}
}
"fail to validate" when {
"given an empty value" in {
Json.fromJson[PositionWithinBusiness](JsString("")) must
be(JsError((JsPath \\ "positions") -> play.api.libs.json.JsonValidationError("error.invalid")))
}
}
"write the correct value" when {
"given a BeneficialOwner" in {
Json.toJson(BeneficialOwner.asInstanceOf[PositionWithinBusiness]) must be(JsString("01"))
}
"given a Director" in {
Json.toJson(Director.asInstanceOf[PositionWithinBusiness]) must be(JsString("02"))
}
"given a InternalAccountant" in {
Json.toJson(InternalAccountant.asInstanceOf[PositionWithinBusiness]) must be(JsString("03"))
}
"given a NominatedOfficer" in {
Json.toJson(NominatedOfficer.asInstanceOf[PositionWithinBusiness]) must be(JsString("04"))
}
"given a Partner" in {
Json.toJson(Partner.asInstanceOf[PositionWithinBusiness]) must be(JsString("05"))
}
"given a SoleProprietor" in {
Json.toJson(SoleProprietor.asInstanceOf[PositionWithinBusiness]) must be(JsString("06"))
}
"given a DesignatedMember" in {
Json.toJson(DesignatedMember.asInstanceOf[PositionWithinBusiness]) must be(JsString("07"))
}
"given an Other" in {
Json.toJson(Other("some new role").asInstanceOf[PositionWithinBusiness]) mustBe Json.obj("other" -> "some new role")
}
}
}
}
}
| hmrc/amls-frontend | test/models/responsiblepeople/PositionInBusinessSpec.scala | Scala | apache-2.0 | 7,484 |
package de.lenabrueder.rfc6902
import de.lenabrueder.UnitSpec
import play.api.libs.json._
class JsonPatchOverlayOpSpec extends UnitSpec {
val overlayOp = "overlay"
val json = Json.parse("""{"a":"b", "b":{"c":"d"}, "c":1}""")
"JsPatchOverlayOp" should {
"correctly add flat elements to a sub-path" in {
val patch = JsPatch(Json.parse(s"""{"op":"$overlayOp", "path":"/b", "value": {"rainbow": "unicorn"}}"""))
patch shouldBe 'right
patch.right.get(json) should equal(Right(Json.parse("""{"a":"b", "b":{"c":"d", "rainbow":"unicorn"}, "c":1}""")))
}
"correctly add complex elements to a sub-path" in {
val patch = JsPatch(Json.parse(s"""{"op":"$overlayOp", "path":"/b", "value": {"rainbow": {"complex":"unicorn"}}}"""))
patch shouldBe 'right
patch.right.get(json) should equal(Right(Json.parse("""{"a":"b", "b":{"c":"d", "rainbow": {"complex":"unicorn"}}, "c":1}""")))
}
"correctly create a path from scratch that does not exist in the original JSON" in {
val patch = JsPatch(Json.parse(s"""{"op":"$overlayOp", "path":"/z/y/x", "value": {"rainbow": {"complex":"unicorn"}}}"""))
patch shouldBe 'right
patch.right.get(json) should equal(Right(Json.parse("""{"a":"b", "b":{"c":"d"}, "c":1,"z":{"y":{"x":{"rainbow": {"complex":"unicorn"}}}}}""")))
}
"not care at all about the original type of a path when it should overlay at that path" in {
//for example the case when trying to write something to path "/c" in the above example
val patch = JsPatch(Json.parse(s"""{"op":"$overlayOp", "path":"/c", "value": {"rainbow": "unicorn"}}"""))
patch shouldBe 'right
patch.right.get(json) should equal(Right(Json.parse("""{"a":"b", "b":{"c":"d"}, "c":{"rainbow": "unicorn"}}""")))
}
"not care at all about the original type of a path when it should overlay at a deeper point from that path" in {
      // for example the case when trying to write something to path "/c/d" in the above example
val patch = JsPatch(Json.parse(s"""{"op":"$overlayOp", "path":"/c/d", "value": {"rainbow": "unicorn"}}"""))
patch shouldBe 'right
patch.right.get(json) should equal(Right(Json.parse("""{"a":"b", "b":{"c":"d"}, "c":{"d": {"rainbow": "unicorn"}}}""")))
}
}
}
| lenalebt/play-rfc6902 | src/test/scala/de/lenabrueder/rfc6902/JsonPatchOverlayOpSpec.scala | Scala | lgpl-3.0 | 2,282 |
package infrastructure.view
import scala.slick.driver.JdbcProfile
trait Profile {
val profile: JdbcProfile
}
| pawelkaczor/ddd-leaven-akka | src/main/scala/infrastructure/view/Profile.scala | Scala | mit | 113 |
package scdbpf
import scala.collection.immutable._
import Sc4Path._
import DbpfUtil._
trait Sc4Path extends DbpfType {
def terrainVariance: Boolean
def paths: Seq[Path]
def stopPaths: Seq[StopPath]
def copy(
terrainVariance: Boolean = terrainVariance,
paths: Seq[Path] = paths,
stopPaths: Seq[StopPath] = stopPaths): Sc4Path = Sc4Path(terrainVariance, paths, stopPaths)
  /** Rotates and flips all paths and stop paths in this `Sc4Path`. If
   * `rf.flipped` is set, the paths will also be reversed (as would be expected).
*/
def * (rf: RotFlip): Sc4Path =
if (rf == RotFlip.R0F0) this
else copy(paths = paths map (_ * rf), stopPaths = stopPaths map (_ * rf))
/** Shifts the path vertically by translation `t`. */
def shiftHeight(t: Float) = copy(paths = paths map (_.shiftHeight(t)), stopPaths = stopPaths map (_.shiftHeight(t)))
/** Combines two paths files by appending `that` to `this`. The terrain
* variance will be set if it was set for either of the two paths.
* Automatically renumbers the class numbers to ensure that no class number
* is assigned twice.
*/
def ++ (that: Sc4Path) =
Sc4Path(this.terrainVariance || that.terrainVariance, this.paths ++ that.paths, this.stopPaths ++ that.stopPaths).renumberClassNumbers
/** Rebuilds the class numbers of the paths and stop paths in this `Sc4Path`
* from scratch. This may be necessary so that no class number is assigned
* twice for a given class of paths, i.e. transport type, entry, exit,
* UK-flag and class number must all be distinct for each path (and the class
* number is the only variable).
*
* The new class numbers will be 0 for paths that are singular in a class,
* and will be numbered sequentially from 1 otherwise.
*/
def renumberClassNumbers: Sc4Path = {
type PathProp = (TransportType, Cardinal, Cardinal, Boolean)
val pathToTuple = (p: Path) => (p.transportType, p.entry, p.exit, false)
val stopPathToTuple = (p: StopPath) => (p.transportType, p.entry, p.exit, p.uk)
val updatePath = (p: Path, i: Int) => p.copy(classNumber = i)
val updateStopPath = (p: StopPath, i: Int) => p.copy(classNumber = i)
def renumber[A <: PathLike](paths: Seq[A], toTuple: A => PathProp, updateClass: (A, Int) => A): Seq[A] = {
val lastIndex = collection.mutable.Map.empty[PathProp, Int] // stores index of a given class number
val ps = collection.mutable.ArrayBuffer.empty[A]
for ((p, i) <- paths.zipWithIndex; prop = toTuple(p)) {
if (!lastIndex.contains(prop)) // class does not yet exist
ps += updateClass(p, 0)
else {
val j = lastIndex(prop)
val q = ps(j)
if (q.classNumber == 0) { // we need to update class number of q
ps(j) = updateClass(q, 1)
ps += updateClass(p, 2)
} else {
ps += updateClass(p, q.classNumber + 1)
}
}
lastIndex(prop) = i
}
ps.to[Seq]
}
val result = copy(paths = renumber(paths, pathToTuple, updatePath),
stopPaths = renumber(stopPaths, stopPathToTuple, updateStopPath))
assert(result.paths.size == paths.size && result.stopPaths.size == stopPaths.size)
result
}
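  // Usage sketch (hypothetical values pathA and pathB, not part of this file): `pathA ++ pathB`
  // already renumbers internally, whereas manually merged path sequences should call this
  // explicitly, e.g. `Sc4Path(false, pathA.paths ++ pathB.paths).renumberClassNumbers`.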
override def toString: String = {
val sb = new StringBuilder
    def addln(s: String): Unit = { sb ++= s + "\r\n" } // explicitly use windows line breaks to ensure compatibility across editors
val version =
if (paths.exists(_.junction)) 2
else if (!stopPaths.isEmpty) 1
else 0
addln("SC4PATHS")
addln("1." + version)
addln(paths.size.toString)
if (version > 0)
addln(stopPaths.size.toString)
addln(if (terrainVariance) "1" else "0")
paths flatMap (_.lines(version)) foreach addln
stopPaths flatMap (_.lines) foreach addln
sb.toString
}
}
object Sc4Path {
def apply(terrainVariance: Boolean, paths: Seq[Path], stopPaths: Seq[StopPath] = Seq()): Sc4Path =
new FreeSc4Path(terrainVariance, paths, stopPaths)
implicit val converter = new Converter[DbpfType, Sc4Path] {
def apply(from: DbpfType): Sc4Path = {
try {
new BufferedSc4Path(from.dataView)
} catch {
case e @ (_: NoSuchElementException
|_: IllegalArgumentException
|_: IndexOutOfBoundsException
|_: NumberFormatException
|_: org.parboiled.errors.ParserRuntimeException) =>
throw new DbpfDecodeFailedException(e.toString, e)
}
}
}
type Coord = (Float, Float, Float)
type TransportType = TransportType.Value
object TransportType extends Enumeration {
val Car = Value(1)
val Sim = Value(2)
val Train = Value(3)
val Subway = Value(4)
val ElTrain = Value(6)
val Monorail = Value(7)
}
type Cardinal = Cardinal.Value
object Cardinal extends Enumeration {
val West, North, East, South = Value
val Special = Value(255)
}
sealed trait PathLike {
type Self <: PathLike
val comment: Option[String]
val transportType: TransportType
val classNumber: Int
val entry: Cardinal
val exit: Cardinal
def header: String
def * (rf: RotFlip): Self
/** Shifts the path vertically by translation `t`. */
def shiftHeight(t: Float): Self
private[Sc4Path] def classAsString = if (classNumber == 0) "" else ('a' + classNumber - 1).toChar + "_"
private[Sc4Path] def commentLines = { // adds -- delimiters if missing
comment.toList flatMap (_.lines) map (_.trim) map
(c => if (c.startsWith("--")) c else "-- " + c)
}
private[Sc4Path] def coordString(c: Coord): String = c.productIterator.mkString(",")
}
case class Path(
comment: Option[String],
transportType: TransportType,
classNumber: Int,
entry: Cardinal,
exit: Cardinal,
junction: Boolean = false,
coords: Seq[Coord]) extends PathLike {
require(coords.size >= 2, "at least 2 coords are required")
// TODO ???
//require(coords zip coords.tail forall { case (a, b) => a != b } , "coords need to be distinct")
type Self = Path
def header: String =
s"-- ${transportType.toString}_${classAsString}${entry.id}_${exit.id}${if (junction) "_J" else ""}"
def lines(version: Int): List[String] = {
var res: List[Any] = commentLines ++ List(header, transportType.id, classNumber, entry.id, exit.id)
if (version >= 2) {
res :+= (if (junction) 1 else 0)
}
res :+= coords.size
res ++= coords map (c => coordString(c))
res map (_.toString)
}
/** If rf.flipped, this also reverses the path. */
def * (rf: RotFlip): Path = {
val res = copy(entry = entry *: rf, exit = exit *: rf, coords = coords map (_ *: rf))
if (rf.flipped) res.reverse else res
}
def reverse: Path = copy(entry = exit, exit = entry, coords = coords.reverse)
def shiftHeight(t: Float) = copy(coords = coords map { case (x,y,z) => (x, y, z + t) })
}
case class StopPath(
comment: Option[String],
uk: Boolean,
transportType: TransportType,
classNumber: Int,
entry: Cardinal,
exit: Cardinal,
coord: Coord) extends PathLike {
type Self = StopPath
def header: String =
s"-- Stop${if (uk) "UK" else ""}_${transportType.toString}_${classAsString}${entry.id}_${exit.id}"
def lines: List[String] = {
commentLines ++ List(
header, if (uk) 2 else 1, transportType.id, classNumber, entry.id, exit.id, coordString(coord)
) map (_.toString)
}
/** If rf.flipped, this will also toggle the uk flag. */
def * (rf: RotFlip): StopPath = {
copy(uk = uk ^ rf.flipped, entry = entry *: rf, exit = exit *: rf, coord = coord *: rf)
}
def shiftHeight(t: Float) = copy(coord = coord.copy(_3 = coord._3 + t))
}
private class FreeSc4Path(val terrainVariance: Boolean, val paths: Seq[Path], val stopPaths: Seq[StopPath]) extends Sc4Path {
protected lazy val data: Array[Byte] = toString.getBytes(DbpfUtil.asciiEncoding)
}
private lazy val parser = new Sc4PathParser() // needs to be locked for concurrent access
private class BufferedSc4Path(arr: Array[Byte]) extends RawType(arr) with Sc4Path {
override lazy val toString = new String(data, DbpfUtil.asciiEncoding)
val (terrainVariance, paths, stopPaths) = {
val text = toString
parser.synchronized {
val p = parser.parseSc4Path(text)
(p.terrainVariance, p.paths, p.stopPaths)
}
}
}
}
| memo33/scdbpf | src/main/scala/scdbpf/Sc4Path.scala | Scala | mit | 8,540 |
/*
* SelectionTruncationImpl.scala
* (Muta)
*
* Copyright (c) 2013-2014 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.muta
package impl
/** A truncation type selection algorithm. Cf. http://en.wikipedia.org/wiki/Truncation_selection */
trait SelectionTruncationImpl[Chromosome] extends Selection[Chromosome] {
def size: SelectionSize
def apply(pop: Vec[(Chromosome, Double)], r: util.Random): Vec[Chromosome] = {
val n = size(pop.size)
val sorted = pop.sortBy(_._2)
sorted.takeRight(n).map(_._1)
}
}
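// Worked example (hypothetical fitness values, not part of this trait): for a population
// Vec(("a", 0.1), ("b", 0.9), ("c", 0.4), ("d", 0.7)) and a SelectionSize yielding n = 2,
// sortBy(_._2) orders the chromosomes ascending and takeRight(2) keeps the two fittest,
// so apply returns Vec("d", "b").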
| Sciss/Muta | src/main/scala/de/sciss/muta/impl/SelectionTruncationImpl.scala | Scala | lgpl-3.0 | 721 |
package com.wavesplatform.transaction.serialization.impl
import java.nio.ByteBuffer
import com.google.common.primitives.{Bytes, Longs}
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.serialization.{ByteBufferOps, Deser}
import com.wavesplatform.transaction.TxVersion
import com.wavesplatform.transaction.assets.SetAssetScriptTransaction
import play.api.libs.json.{JsObject, Json}
import scala.util.Try
object SetAssetScriptTxSerializer {
def toJson(tx: SetAssetScriptTransaction): JsObject = {
import tx._
BaseTxJson.toJson(tx) ++ Json.obj(
"assetId" -> asset.id.toString,
"script" -> script.map(_.bytes().base64)
) ++ (if (tx.version == TxVersion.V1) Json.obj("chainId" -> tx.chainId) else Json.obj())
}
def bodyBytes(tx: SetAssetScriptTransaction): Array[Byte] = {
import tx._
version match {
case TxVersion.V1 =>
Bytes.concat(
Array(builder.typeId, version, chainId),
sender.arr,
asset.id.arr,
Longs.toByteArray(fee),
Longs.toByteArray(timestamp),
Deser.serializeOptionOfArrayWithLength(script)(s => s.bytes().arr)
)
case _ =>
PBTransactionSerializer.bodyBytes(tx)
}
}
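  // V1 body layout for reference (field widths follow the usual Waves encodings and are an
  // editorial note, not taken from this file): type id (1 byte) | version (1) | chain id (1) |
  // sender public key (32) | asset id (32) | fee (8, big-endian) | timestamp (8) |
  // optional script serialized with a length prefix via Deser.serializeOptionOfArrayWithLength.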
def toBytes(tx: SetAssetScriptTransaction): Array[Byte] =
if (tx.isProtobufVersion) PBTransactionSerializer.bytes(tx)
else Bytes.concat(Array(0: Byte), this.bodyBytes(tx), tx.proofs.bytes())
def parseBytes(bytes: Array[Byte]): Try[SetAssetScriptTransaction] = Try {
require(bytes.length > 2, "buffer underflow while parsing transaction")
val buf = ByteBuffer.wrap(bytes)
require(buf.getByte == 0 && buf.getByte == SetAssetScriptTransaction.typeId && buf.getByte == TxVersion.V1, "transaction type mismatch")
require(buf.getByte == AddressScheme.current.chainId, "transaction chainId mismatch")
val sender = buf.getPublicKey
val asset = buf.getIssuedAsset
val fee = buf.getLong
val timestamp = buf.getLong
val script = buf.getScript
val proofs = buf.getProofs
SetAssetScriptTransaction(TxVersion.V1, sender, asset, script, fee, timestamp, proofs, AddressScheme.current.chainId)
}
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/transaction/serialization/impl/SetAssetScriptTxSerializer.scala | Scala | mit | 2,205 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScVariableStub
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.result.{TypeResult, TypingContext}
/**
* @author Alexander Podkhalyuzin
*/
class ScVariableDeclarationImpl private (stub: ScVariableStub, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, ScalaElementTypes.VARIABLE_DECLARATION, node) with ScVariableDeclaration {
def this(node: ASTNode) = this(null, node)
def this(stub: ScVariableStub) = this(stub, null)
override def toString: String = "ScVariableDeclaration: " + declaredElements.map(_.name).mkString(", ")
def getType(ctx: TypingContext): TypeResult[ScType] = wrap(typeElement) flatMap {_.getType(TypingContext.empty)}
def declaredElements: Seq[ScFieldId] = getIdList.fieldIds
def typeElement: Option[ScTypeElement] = byPsiOrStub(findChild(classOf[ScTypeElement]))(_.typeElement)
def getIdList: ScIdList = getStubOrPsiChild(ScalaElementTypes.IDENTIFIER_LIST)
override def accept(visitor: ScalaElementVisitor) {
visitor.visitVariableDeclaration(this)
}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case s: ScalaElementVisitor => s.visitVariableDeclaration(this)
case _ => super.accept(visitor)
}
}
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScVariableDeclarationImpl.scala | Scala | apache-2.0 | 1,802 |
package org.scaladebugger.api.profiles.java.info.events
import com.sun.jdi.event._
import com.sun.jdi.{ReferenceType, ThreadReference, VirtualMachine}
import org.scaladebugger.api.lowlevel.events.JDIEventArgument
import org.scaladebugger.api.lowlevel.requests.JDIRequestArgument
import org.scaladebugger.api.profiles.traits.info.InfoProducer
import org.scaladebugger.api.profiles.traits.info.events._
import org.scaladebugger.api.virtualmachines.ScalaVirtualMachine
import org.scaladebugger.test.helpers.ParallelMockFunSpec
class JavaVMDisconnectEventInfoSpec extends ParallelMockFunSpec {
private val mockScalaVirtualMachine = mock[ScalaVirtualMachine]
private val mockInfoProducer = mock[InfoProducer]
private val mockVMDisconnectEvent = mock[VMDisconnectEvent]
private val mockJdiRequestArguments = Seq(mock[JDIRequestArgument])
private val mockJdiEventArguments = Seq(mock[JDIEventArgument])
private val mockJdiArguments =
mockJdiRequestArguments ++ mockJdiEventArguments
private val javaVMDisconnectEventInfoProfile = new JavaVMDisconnectEventInfo(
scalaVirtualMachine = mockScalaVirtualMachine,
infoProducer = mockInfoProducer,
vmDisconnectEvent = mockVMDisconnectEvent,
jdiArguments = mockJdiArguments
)
describe("JavaVMDisconnectEventInfo") {
describe("#toJavaInfo") {
it("should return a new instance of the Java profile representation") {
val expected = mock[VMDisconnectEventInfo]
// Event info producer will be generated in its Java form
val mockEventInfoProducer = mock[EventInfoProducer]
(mockInfoProducer.eventProducer _).expects()
.returning(mockEventInfoProducer).once()
(mockEventInfoProducer.toJavaInfo _).expects()
.returning(mockEventInfoProducer).once()
// Java version of event info producer creates a new event instance
// NOTE: Cannot validate second set of args because they are
// call-by-name, which ScalaMock does not support presently
(mockEventInfoProducer.newVMDisconnectEventInfo _).expects(
mockScalaVirtualMachine,
mockVMDisconnectEvent,
mockJdiArguments
).returning(expected).once()
val actual = javaVMDisconnectEventInfoProfile.toJavaInfo
actual should be (expected)
}
}
describe("#isJavaInfo") {
it("should return true") {
val expected = true
val actual = javaVMDisconnectEventInfoProfile.isJavaInfo
actual should be (expected)
}
}
describe("#toJdiInstance") {
it("should return the JDI instance this profile instance represents") {
val expected = mockVMDisconnectEvent
val actual = javaVMDisconnectEventInfoProfile.toJdiInstance
actual should be (expected)
}
}
}
}
| ensime/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/profiles/java/info/events/JavaVMDisconnectEventInfoSpec.scala | Scala | apache-2.0 | 2,819 |
package slick.compiler
import slick.util.{ConstArrayOp, ConstArray}
import slick.{SlickTreeException, SlickException}
import slick.ast._
import Util._
import TypeUtil._
import scala.collection.mutable
/** Expand sum types and their catamorphisms to equivalent product type operations. */
class ExpandSums extends Phase {
val name = "expandSums"
def apply(state: CompilerState) =
if(state.get(Phase.assignUniqueSymbols).map(_.nonPrimitiveOption).getOrElse(true)) state.map(expandSums)
else state
val Disc1 = LiteralNode(ScalaBaseType.optionDiscType.optionType, Option(1))
val DiscNone = LiteralNode(ScalaBaseType.optionDiscType.optionType, None)
def expandSums(n: Node): Node = {
var multi = false
/** Perform the sum expansion on a Node */
def tr(tree: Node, oldDiscCandidates: Set[(TypeSymbol, List[TermSymbol])]): Node = {
val discCandidates = oldDiscCandidates ++ (tree match {
case Filter(_, _, p) => collectDiscriminatorCandidates(p)
case Bind(_, j: Join, _) => collectDiscriminatorCandidates(j.on)
case _ => Set.empty
})
val tree2 = tree.mapChildren(tr(_, discCandidates), keepType = true)
val tree3 = tree2 match {
// Expand multi-column null values in ELSE branches (used by Rep[Option].filter) with correct type
case IfThenElse(ConstArray(pred, then1 :@ tpe, LiteralNode(None) :@ OptionType(ScalaBaseType.nullType))) =>
multi = true
IfThenElse(ConstArray(pred, then1, buildMultiColumnNone(tpe))) :@ tpe
// Identity OptionFold/OptionApply combination -> remove
case OptionFold(from, LiteralNode(None) :@ OptionType(ScalaBaseType.nullType), oa @ OptionApply(Ref(s)), gen) if s == gen =>
silentCast(oa.nodeType, from)
// Primitive OptionFold representing GetOrElse -> translate to GetOrElse
case OptionFold(from :@ OptionType.Primitive(_), LiteralNode(v), Ref(s), gen) if s == gen =>
GetOrElse(from, () => v).infer()
// Primitive OptionFold -> translate to null check
case OptionFold(from :@ OptionType.Primitive(_), ifEmpty, map, gen) =>
val pred = Library.==.typed[Boolean](from, LiteralNode(null))
val n2 = (ifEmpty, map) match {
case (LiteralNode(true), LiteralNode(false)) => pred
case (LiteralNode(false), LiteralNode(true)) => Library.Not.typed[Boolean](pred)
case _ =>
val ifDefined = map.replace({
case r @ Ref(s) if s == gen => silentCast(r.nodeType, from)
}, keepType = true)
val ifEmpty2 = silentCast(ifDefined.nodeType.structural, ifEmpty)
IfThenElse(ConstArray(pred, ifEmpty2, ifDefined))
}
n2.infer()
// Other OptionFold -> translate to discriminator check
case OptionFold(from, ifEmpty, map, gen) =>
multi = true
val left = from.select(ElementSymbol(1)).infer()
val pred = Library.==.typed[Boolean](left, LiteralNode(null))
val n2 = (ifEmpty, map) match {
case (LiteralNode(true), LiteralNode(false)) => pred
case (LiteralNode(false), LiteralNode(true)) => Library.Not.typed[Boolean](pred)
case _ =>
val ifDefined = map.replace({
case r @ Ref(s) if s == gen => silentCast(r.nodeType, from.select(ElementSymbol(2)).infer())
}, keepType = true)
val ifEmpty2 = silentCast(ifDefined.nodeType.structural, ifEmpty)
if(left == Disc1) ifDefined else IfThenElse(ConstArray(Library.Not.typed[Boolean](pred), ifDefined, ifEmpty2))
}
n2.infer()
// Primitive OptionApply -> leave unchanged
case n @ OptionApply(_) :@ OptionType.Primitive(_) => n
// Other OptionApply -> translate to product form
case n @ OptionApply(ch) =>
multi = true
ProductNode(ConstArray(Disc1, silentCast(toOptionColumns(ch.nodeType), ch))).infer()
// Non-primitive GetOrElse
// (.get is only defined on primitive Options, but this can occur inside of HOFs like .map)
case g @ GetOrElse(ch :@ tpe, _) =>
tpe match {
case OptionType.Primitive(_) => g
case _ => throw new SlickException(".get may only be called on Options of top-level primitive types")
}
// Option-extended left outer, right outer or full outer join
case bind @ Bind(bsym, Join(_, _, _, _, jt, _), _) if jt == JoinType.LeftOption || jt == JoinType.RightOption || jt == JoinType.OuterOption =>
multi = true
translateJoin(bind, discCandidates)
case n => n
}
val tree4 = fuse(tree3)
tree4 :@ trType(tree4.nodeType)
}
val n2 = tr(n, Set.empty)
if(multi) expandConditionals(n2) else n2
}
/** Translate an Option-extended left outer, right outer or full outer join */
def translateJoin(bind: Bind, discCandidates: Set[(TypeSymbol, List[TermSymbol])]): Bind = {
logger.debug("translateJoin", bind)
val Bind(bsym, (join @ Join(lsym, rsym, left :@ CollectionType(_, leftElemType), right :@ CollectionType(_, rightElemType), jt, on)) :@ CollectionType(cons, elemType), pure) = bind
val lComplex = !leftElemType.structural.isInstanceOf[AtomicType]
val rComplex = !rightElemType.structural.isInstanceOf[AtomicType]
logger.debug(s"Translating join ($jt, complex: $lComplex, $rComplex):", bind)
// Find an existing column that can serve as a discriminator
def findDisc(t: Type): Option[List[TermSymbol]] = {
val global: Set[List[TermSymbol]] = t match {
case NominalType(ts, exp) =>
val c = discCandidates.filter { case (t, ss) => t == ts && ss.nonEmpty }.map(_._2)
logger.debug("Discriminator candidates from surrounding Filter and Join predicates: "+
c.map(Path.toString).mkString(", "))
c
case _ => Set.empty
}
def find(t: Type, path: List[TermSymbol]): Vector[List[TermSymbol]] = t.structural match {
case StructType(defs) => defs.toSeq.iterator.flatMap { case (s, t) => find(t, s :: path) }.toVector
case p: ProductType => p.elements.iterator.zipWithIndex.flatMap { case (t, i) => find(t, ElementSymbol(i+1) :: path) }.toVector
case _: AtomicType => Vector(path)
case _ => Vector.empty
}
val local = find(t, Nil).sortBy { ss =>
(if(global contains ss) 3 else 1) * (ss.head match {
case f: FieldSymbol =>
if(f.options contains ColumnOption.PrimaryKey) -2 else -1
case _ => 0
})
}
logger.debug("Local candidates: "+local.map(Path.toString).mkString(", "))
local.headOption
}
// Option-extend one side of the join with a discriminator column
def extend(side: Node, sym: TermSymbol, on: Node): (Node, Node, Boolean) = {
val extendGen = new AnonSymbol
val elemType = side.nodeType.asCollectionType.elementType
val (disc, createDisc) = findDisc(elemType) match {
case Some(path) =>
logger.debug("Using existing column "+Path(path)+" as discriminator in "+elemType)
(FwdPath(extendGen :: path.reverse), true)
case None =>
logger.debug("No suitable discriminator column found in "+elemType)
(Disc1, false)
}
val extend :@ CollectionType(_, extendedElementType) = Bind(extendGen, side, Pure(ProductNode(ConstArray(disc, Ref(extendGen))))).infer()
val sideInCondition = Select(Ref(sym) :@ extendedElementType, ElementSymbol(2)).infer()
val on2 = on.replace({
case Ref(s) if s == sym => sideInCondition
}, bottomUp = true).infer()
(extend, on2, createDisc)
}
// Translate the join depending on JoinType and Option type
val (left2, right2, on2, jt2, ldisc, rdisc) = jt match {
case JoinType.LeftOption =>
val (right2, on2, rdisc) = if(rComplex) extend(right, rsym, on) else (right, on, false)
(left, right2, on2, JoinType.Left, false, rdisc)
case JoinType.RightOption =>
val (left2, on2, ldisc) = if(lComplex) extend(left, lsym, on) else (left, on, false)
(left2, right, on2, JoinType.Right, ldisc, false)
case JoinType.OuterOption =>
val (left2, on2, ldisc) = if(lComplex) extend(left, lsym, on) else (left, on, false)
val (right2, on3, rdisc) = if(rComplex) extend(right, rsym, on2) else (right, on2, false)
(left2, right2, on3, JoinType.Outer, ldisc, rdisc)
}
// Cast to translated Option type in outer bind
val join2 :@ CollectionType(_, elemType2) = Join(lsym, rsym, left2, right2, jt2, on2).infer()
def optionCast(idx: Int, createDisc: Boolean): Node = {
val ref = Select(Ref(bsym) :@ elemType2, ElementSymbol(idx+1))
val v = if(createDisc) {
val protoDisc = Select(ref, ElementSymbol(1)).infer()
val rest = Select(ref, ElementSymbol(2))
val disc = IfThenElse(ConstArray(Library.==.typed[Boolean](silentCast(OptionType(protoDisc.nodeType), protoDisc), LiteralNode(null)), DiscNone, Disc1))
ProductNode(ConstArray(disc, rest))
} else ref
silentCast(trType(elemType.asInstanceOf[ProductType].children(idx)), v)
}
val ref = ProductNode(ConstArray(optionCast(0, ldisc), optionCast(1, rdisc))).infer()
val pure2 = pure.replace({
case Ref(s) if s == bsym => ref
// Hoist SilentCasts and remove unnecessary ones
case Library.SilentCast(Library.SilentCast(ch)) :@ tpe => silentCast(tpe, ch)
case Select(Library.SilentCast(ch), s) :@ tpe => silentCast(tpe, ch.select(s).infer())
}, bottomUp = true, keepType = true)
val res = Bind(bsym, join2, pure2).infer()
logger.debug("Translated join:", res)
res
}
/** Create a SilentCast call unless the type already matches */
def silentCast(tpe: Type, n: Node): Node = n match {
case LiteralNode(None) :@ OptionType(ScalaBaseType.nullType) => buildMultiColumnNone(tpe)
case n :@ tpe2 if tpe2 == tpe => n
case n =>
if(tpe == UnassignedType) throw new SlickTreeException("Unexpected UnassignedType for:", n)
Library.SilentCast.typed(tpe, n).infer()
}
/** Create a Node representing a structure of null values of the given Type */
def buildMultiColumnNone(tpe: Type): Node = (tpe.structural match {
case ProductType(ch) => ProductNode(ch.map(buildMultiColumnNone))
case StructType(ch) => StructNode(ch.map { case (sym, t) => (sym, buildMultiColumnNone(t)) })
case OptionType(ch) => LiteralNode(tpe, None)
case t => throw new SlickException("Unexpected non-Option type in multi-column None")
}) :@ tpe
/** Perform the sum expansion on a Type */
def trType(tpe: Type): Type = {
def f(tpe: Type): Type = tpe.mapChildren(f) match {
case t @ OptionType.Primitive(_) => t
case OptionType(ch) => ProductType(ConstArray(ScalaBaseType.optionDiscType.optionType, toOptionColumns(ch)))
case t => t
}
val tpe2 = f(tpe)
logger.debug(s"Translated type: $tpe -> $tpe2")
tpe2
}
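  // Informal illustration (editorial note, not from the original source): a non-primitive
  // Option type such as Option[(Int, String)] is rewritten to a product of a discriminator
  // column plus the option-lifted payload, roughly (disc, (Option[Int], Option[String])),
  // whereas a primitive Option[Int] is matched by OptionType.Primitive and left untouched.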
/** Strip nominal types and convert all atomic types to OptionTypes */
def toOptionColumns(tpe: Type): Type = tpe match {
case NominalType(_, str) => toOptionColumns(str)
case o @ OptionType(ch) if ch.structural.isInstanceOf[AtomicType] => o
case t: AtomicType => OptionType(t)
case t => t.mapChildren(toOptionColumns)
}
/** Fuse unnecessary Option operations */
def fuse(n: Node): Node = n match {
// Option.map
case IfThenElse(ConstArray(Library.Not(Library.==(disc, LiteralNode(null))), ProductNode(ConstArray(Disc1, map)), ProductNode(ConstArray(DiscNone, _)))) =>
ProductNode(ConstArray(disc, map)).infer()
case n => n
}
/** Collect discriminator candidate fields in a predicate. These are all paths below an
* OptionApply, which indicates their future use under a discriminator guard. */
def collectDiscriminatorCandidates(n: Node): Set[(TypeSymbol, List[TermSymbol])] = n.collectAll[(TypeSymbol, List[TermSymbol])] {
case OptionApply(ch) =>
ch.collect[(TypeSymbol, List[TermSymbol])] { case PathOnTypeSymbol(ts, ss) => (ts, ss) }
}.toSet
object PathOnTypeSymbol {
def unapply(n: Node): Option[(TypeSymbol, List[TermSymbol])] = n match {
case (n: PathElement) :@ NominalType(ts, _) => Some((ts, Nil))
case Select(in, s) => unapply(in).map { case (ts, l) => (ts, s :: l) }
case Library.SilentCast(ch) => unapply(ch)
case _ => None
}
}
/** Expand multi-column conditional expressions and SilentCasts.
* Single-column conditionals involving NULL values are optimized away where possible. */
def expandConditionals(n: Node): Node = {
val invalid = mutable.HashSet.empty[TypeSymbol]
def invalidate(n: Node): Unit = invalid ++= n.nodeType.collect { case NominalType(ts, _) => ts }.toSeq
def tr(n: Node): Node = n.mapChildren(tr, keepType = true) match {
// Expand multi-column SilentCasts
case cast @ Library.SilentCast(ch) :@ Type.Structural(ProductType(typeCh)) =>
invalidate(ch)
val elems = typeCh.zipWithIndex.map { case (t, idx) => tr(Library.SilentCast.typed(t, ch.select(ElementSymbol(idx+1))).infer()) }
ProductNode(elems).infer()
case Library.SilentCast(ch) :@ Type.Structural(StructType(typeCh)) =>
invalidate(ch)
val elems = typeCh.map { case (sym, t) => (sym, tr(Library.SilentCast.typed(t, ch.select(sym)).infer())) }
StructNode(elems).infer()
// Optimize trivial SilentCasts
case Library.SilentCast(v :@ tpe) :@ tpe2 if tpe.structural == tpe2.structural =>
invalidate(v)
v
case Library.SilentCast(Library.SilentCast(ch)) :@ tpe => tr(Library.SilentCast.typed(tpe, ch).infer())
case Library.SilentCast(LiteralNode(None)) :@ (tpe @ OptionType.Primitive(_)) => LiteralNode(tpe, None).infer()
// Expand multi-column IfThenElse
case (cond @ IfThenElse(_)) :@ Type.Structural(ProductType(chTypes)) =>
val ch = ConstArrayOp.from(1 to chTypes.length).map { idx =>
val sym = ElementSymbol(idx)
tr(cond.mapResultClauses(n => n.select(sym)).infer())
}
ProductNode(ch).infer()
case (cond @ IfThenElse(_)) :@ Type.Structural(StructType(chTypes)) =>
val ch = chTypes.map { case (sym, _) =>
(sym, tr(cond.mapResultClauses(n => n.select(sym)).infer()))
}
StructNode(ch).infer()
// Optimize null-propagating single-column IfThenElse
case IfThenElse(ConstArray(Library.==(r, LiteralNode(null)), Library.SilentCast(LiteralNode(None)), c @ Library.SilentCast(r2))) if r == r2 => c
// Fix Untyped nulls in else clauses
case cond @ IfThenElse(clauses) if (clauses.last match { case LiteralNode(None) :@ OptionType(ScalaBaseType.nullType) => true; case _ => false }) =>
cond.copy(clauses.init :+ LiteralNode(cond.nodeType, None))
// Resolve Selects into ProductNodes and StructNodes
case Select(ProductNode(ch), ElementSymbol(idx)) => ch(idx-1)
case Select(StructNode(ch), sym) => ch.find(_._1 == sym).get._2
case n2 @ Pure(_, ts) if n2 ne n =>
invalid += ts
n2
case n => n
}
val n2 = tr(n)
logger.debug("Invalidated TypeSymbols: "+invalid.mkString(", "))
n2.replace({
case n: PathElement if n.nodeType.containsSymbol(invalid) => n.untyped
}, bottomUp = true).infer()
}
}
| slick/slick | slick/src/main/scala/slick/compiler/ExpandSums.scala | Scala | bsd-2-clause | 15,523 |
package ch.uzh.cl.slmlib.ngrams.statistics
import ch.uzh.cl.slmlib.ngrams.{NGram, NGramFilter}
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.reflect.ClassTag
/**
 * N-Gram collections with corresponding statistics (frequencies or probabilities) for n-grams in a range of orders.
*/
trait RangeOrderStatistics[T, U] extends Statistics[T, U] {
private[slmlib] val mStatistics: HashMap[Int, RDD[(NGram[T], U)]]
/** Minimum order of n-grams in range. */
val minOrder: Int
/** Maximum order of n-grams in range. */
val maxOrder: Int
override def partitionsNumber = mStatistics(minOrder).partitions.length
  /** Returns a statistics object with n-grams of the given order. The apply() function can also be called:
* {{{
* val a : RangeOrderStatistics
* a(2)
* }}}
*/
def apply(order: Int): SingleOrderStatistics[T, U] = new {} with SingleOrderStatistics[T, U] {
override val rdd: RDD[(NGram[T], U)] = RangeOrderStatistics.this.mStatistics(order)
}
  /** Returns a statistics object with n-grams in the given order range. The apply() function can also be called:
* {{{
* val a : RangeOrderStatistics
* a(2,3)
* }}}
*/
def apply(orderMin: Int, orderMax: Int): RangeOrderStatistics[T, U] = new {} with RangeOrderStatistics[T, U] {
require(orderMin >= RangeOrderStatistics.this.minOrder && orderMin <= RangeOrderStatistics.this.maxOrder && orderMax >= RangeOrderStatistics.this.minOrder && orderMax <= RangeOrderStatistics.this.maxOrder && orderMin < orderMax)
override val minOrder = orderMin
override val maxOrder = orderMax
override val mStatistics = new HashMap[Int, RDD[(NGram[T], U)]]()
for {
order <- orderMin to orderMax by 1
} yield {
mStatistics(order) = RangeOrderStatistics.this.mStatistics(order)
}
}
override def filter(ngFilter: NGramFilter[T, U]): RangeOrderStatistics[T, U] = new {} with RangeOrderStatistics[T, U] {
override val minOrder = RangeOrderStatistics.this.minOrder
override val maxOrder = RangeOrderStatistics.this.maxOrder
override val mStatistics = new HashMap[Int, RDD[(NGram[T], U)]]()
for {
order <- minOrder to maxOrder by 1
} yield {
mStatistics(order) = ngFilter.filter(RangeOrderStatistics.this.mStatistics(order))
}
}
/** Memory efficient iterator over n-grams of all orders.
    * The iterator retrieves one partition at a time, so the memory required on the client side equals the size of the biggest partition.
*/
override def iterator: Iterator[(NGram[T], U)] = {
{
for {
order <- minOrder to maxOrder by 1
} yield {
mStatistics(order).toLocalIterator
}
}.reduceLeft(_ ++ _)
}
override def saveAsTextFile(path: String, format: String): Unit = {
for {
order <- minOrder to maxOrder by 1
} yield {
val childPath = path + "/" + order
mStatistics(order).mapPartitions(_.map(tuple => format.format(tuple._1, tuple._2))).saveAsTextFile(childPath)
}
}
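  // Usage sketch (hypothetical path and format string): each order is written to its own
  // sub-directory, e.g. `stats.saveAsTextFile("hdfs:///tmp/ngrams", "%s\t%s")` creates
  // hdfs:///tmp/ngrams/2, hdfs:///tmp/ngrams/3, ... with one "ngram<TAB>statistic" line per record.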
override def saveNGramsAsSerializedRDD(path: String): Unit = {
for {
order <- minOrder to maxOrder by 1
} yield {
val childPath = path + "/" + order
mStatistics(order).saveAsObjectFile(childPath)
}
}
override def count: Long = {
for {
order <- minOrder to maxOrder by 1
} yield {
mStatistics(order).count
}
}.reduceLeft(_ + _)
}
| michtek/SLMLib | src/main/scala/ch/uzh/cl/slmlib/ngrams/statistics/RangeOrderStatistics.scala | Scala | apache-2.0 | 3,461 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool
import akka.actor.{Actor, ActorRef, ActorRefFactory, Props}
import org.apache.openwhisk.common.{Logging, LoggingMarkers, MetricEmitter, TransactionId}
import org.apache.openwhisk.core.connector.MessageFeed
import org.apache.openwhisk.core.entity.ExecManifest.ReactivePrewarmingConfig
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import scala.annotation.tailrec
import scala.collection.immutable
import scala.concurrent.duration._
import scala.util.{Random, Try}
case class ColdStartKey(kind: String, memory: ByteSize)
case object EmitMetrics
case object AdjustPrewarmedContainer
/**
* A pool managing containers to run actions on.
*
* This pool fulfills the other half of the ContainerProxy contract. Only
* one job (either Start or Run) is sent to a child-actor at any given
* time. The pool then waits for a response of that container, indicating
* the container is done with the job. Only then will the pool send another
* request to that container.
*
* Upon actor creation, the pool will start to prewarm containers according
* to the provided prewarmConfig, iff set. Those containers will **not** be
* part of the poolsize calculation, which is capped by the poolSize parameter.
 * Prewarm containers are only used if they have matching arguments
* (kind, memory) and there is space in the pool.
*
* @param childFactory method to create new container proxy actor
* @param feed actor to request more work from
* @param prewarmConfig optional settings for container prewarming
* @param poolConfig config for the ContainerPool
*/
class ContainerPool(childFactory: ActorRefFactory => ActorRef,
feed: ActorRef,
prewarmConfig: List[PrewarmingConfig] = List.empty,
poolConfig: ContainerPoolConfig)(implicit val logging: Logging)
extends Actor {
import ContainerPool.memoryConsumptionOf
implicit val ec = context.dispatcher
var freePool = immutable.Map.empty[ActorRef, ContainerData]
var busyPool = immutable.Map.empty[ActorRef, ContainerData]
var prewarmedPool = immutable.Map.empty[ActorRef, PreWarmedData]
var prewarmStartingPool = immutable.Map.empty[ActorRef, (String, ByteSize)]
  // If all memory slots are occupied and if there is currently no container to be removed, then the actions will be
// buffered here to keep order of computation.
// Otherwise actions with small memory-limits could block actions with large memory limits.
var runBuffer = immutable.Queue.empty[Run]
// Track the resent buffer head - so that we don't resend buffer head multiple times
var resent: Option[Run] = None
val logMessageInterval = 10.seconds
//periodically emit metrics (don't need to do this for each message!)
context.system.scheduler.scheduleAtFixedRate(30.seconds, 10.seconds, self, EmitMetrics)
// Key is ColdStartKey, value is the number of cold Start in minute
var coldStartCount = immutable.Map.empty[ColdStartKey, Int]
adjustPrewarmedContainer(true, false)
// check periodically, adjust prewarmed container(delete if unused for some time and create some increment containers)
// add some random amount to this schedule to avoid a herd of container removal + creation
val interval = poolConfig.prewarmExpirationCheckInterval + poolConfig.prewarmExpirationCheckIntervalVariance
.map(v =>
Random
.nextInt(v.toSeconds.toInt))
.getOrElse(0)
.seconds
if (prewarmConfig.exists(!_.reactive.isEmpty)) {
context.system.scheduler.scheduleAtFixedRate(
poolConfig.prewarmExpirationCheckInitDelay,
interval,
self,
AdjustPrewarmedContainer)
}
def logContainerStart(r: Run, containerState: String, activeActivations: Int, container: Option[Container]): Unit = {
val namespaceName = r.msg.user.namespace.name.asString
val actionName = r.action.name.name
val actionNamespace = r.action.namespace.namespace
val maxConcurrent = r.action.limits.concurrency.maxConcurrent
val activationId = r.msg.activationId.toString
r.msg.transid.mark(
this,
LoggingMarkers.INVOKER_CONTAINER_START(containerState, namespaceName, actionNamespace, actionName),
s"containerStart containerState: $containerState container: $container activations: $activeActivations of max $maxConcurrent action: $actionName namespace: $namespaceName activationId: $activationId",
akka.event.Logging.InfoLevel)
}
def receive: Receive = {
// A job to run on a container
//
// Run messages are received either via the feed or from child containers which cannot process
// their requests and send them back to the pool for rescheduling (this may happen if "docker" operations
// fail for example, or a container has aged and was destroying itself when a new request was assigned)
case r: Run =>
// Check if the message is resent from the buffer. Only the first message on the buffer can be resent.
val isResentFromBuffer = runBuffer.nonEmpty && runBuffer.dequeueOption.exists(_._1.msg == r.msg)
        // Only process the request if there are no other requests waiting for free slots, or if the current request is
        // the next request to process.
        // It is guaranteed that only the first message on the buffer is resent.
if (runBuffer.isEmpty || isResentFromBuffer) {
if (isResentFromBuffer) {
//remove from resent tracking - it may get resent again, or get processed
resent = None
}
val kind = r.action.exec.kind
val memory = r.action.limits.memory.megabytes.MB
val createdContainer =
// Schedule a job to a warm container
ContainerPool
.schedule(r.action, r.msg.user.namespace.name, freePool)
.map(container => (container, container._2.initingState)) //warmed, warming, and warmingCold always know their state
.orElse(
// There was no warm/warming/warmingCold container. Try to take a prewarm container or a cold container.
// When take prewarm container, has no need to judge whether user memory is enough
takePrewarmContainer(r.action)
.map(container => (container, "prewarmed"))
.orElse {
// Is there enough space to create a new container or do other containers have to be removed?
if (hasPoolSpaceFor(busyPool ++ freePool ++ prewarmedPool, prewarmStartingPool, memory)) {
val container = Some(createContainer(memory), "cold")
incrementColdStartCount(kind, memory)
container
} else None
})
.orElse(
// Remove a container and create a new one for the given job
ContainerPool
// Only free up the amount, that is really needed to free up
.remove(freePool, Math.min(r.action.limits.memory.megabytes, memoryConsumptionOf(freePool)).MB)
.map(removeContainer)
// If the list had at least one entry, enough containers were removed to start the new container. After
// removing the containers, we are not interested anymore in the containers that have been removed.
.headOption
.map(_ =>
takePrewarmContainer(r.action)
.map(container => (container, "recreatedPrewarm"))
.getOrElse {
val container = (createContainer(memory), "recreated")
incrementColdStartCount(kind, memory)
container
}))
createdContainer match {
case Some(((actor, data), containerState)) =>
//increment active count before storing in pool map
val newData = data.nextRun(r)
val container = newData.getContainer
if (newData.activeActivationCount < 1) {
logging.error(this, s"invalid activation count < 1 ${newData}")
}
//only move to busyPool if max reached
if (!newData.hasCapacity()) {
if (r.action.limits.concurrency.maxConcurrent > 1) {
logging.info(
this,
s"container ${container} is now busy with ${newData.activeActivationCount} activations")
}
busyPool = busyPool + (actor -> newData)
freePool = freePool - actor
} else {
//update freePool to track counts
freePool = freePool + (actor -> newData)
}
// Remove the action that was just executed from the buffer and execute the next one in the queue.
if (isResentFromBuffer) {
// It is guaranteed that the currently executed messages is the head of the queue, if the message comes
// from the buffer
val (_, newBuffer) = runBuffer.dequeue
runBuffer = newBuffer
// Try to process the next item in buffer (or get another message from feed, if buffer is now empty)
processBufferOrFeed()
}
actor ! r // forwards the run request to the container
logContainerStart(r, containerState, newData.activeActivationCount, container)
case None =>
// this can also happen if createContainer fails to start a new container, or
// if a job is rescheduled but the container it was allocated to has not yet destroyed itself
// (and a new container would over commit the pool)
val isErrorLogged = r.retryLogDeadline.map(_.isOverdue).getOrElse(true)
val retryLogDeadline = if (isErrorLogged) {
logging.warn(
this,
s"Rescheduling Run message, too many message in the pool, " +
s"freePoolSize: ${freePool.size} containers and ${memoryConsumptionOf(freePool)} MB, " +
s"busyPoolSize: ${busyPool.size} containers and ${memoryConsumptionOf(busyPool)} MB, " +
s"maxContainersMemory ${poolConfig.userMemory.toMB} MB, " +
s"userNamespace: ${r.msg.user.namespace.name}, action: ${r.action}, " +
s"needed memory: ${r.action.limits.memory.megabytes} MB, " +
s"waiting messages: ${runBuffer.size}")(r.msg.transid)
MetricEmitter.emitCounterMetric(LoggingMarkers.CONTAINER_POOL_RESCHEDULED_ACTIVATION)
Some(logMessageInterval.fromNow)
} else {
r.retryLogDeadline
}
if (!isResentFromBuffer) {
// Add this request to the buffer, as it is not there yet.
runBuffer = runBuffer.enqueue(Run(r.action, r.msg, retryLogDeadline))
}
//buffered items will be processed via processBufferOrFeed()
}
} else {
// There are currently actions waiting to be executed before this action gets executed.
// These waiting actions were not able to free up enough memory.
runBuffer = runBuffer.enqueue(r)
}
// Container is free to take more work
case NeedWork(warmData: WarmedData) =>
val oldData = freePool.get(sender()).getOrElse(busyPool(sender()))
val newData =
warmData.copy(lastUsed = oldData.lastUsed, activeActivationCount = oldData.activeActivationCount - 1)
if (newData.activeActivationCount < 0) {
logging.error(this, s"invalid activation count after warming < 1 ${newData}")
}
if (newData.hasCapacity()) {
//remove from busy pool (may already not be there), put back into free pool (to update activation counts)
freePool = freePool + (sender() -> newData)
if (busyPool.contains(sender())) {
busyPool = busyPool - sender()
if (newData.action.limits.concurrency.maxConcurrent > 1) {
logging.info(
this,
s"concurrent container ${newData.container} is no longer busy with ${newData.activeActivationCount} activations")
}
}
} else {
busyPool = busyPool + (sender() -> newData)
freePool = freePool - sender()
}
processBufferOrFeed()
// Container is prewarmed and ready to take work
case NeedWork(data: PreWarmedData) =>
prewarmStartingPool = prewarmStartingPool - sender()
prewarmedPool = prewarmedPool + (sender() -> data)
// Container got removed
case ContainerRemoved(replacePrewarm) =>
// if container was in free pool, it may have been processing (but under capacity),
// so there is capacity to accept another job request
freePool.get(sender()).foreach { f =>
freePool = freePool - sender()
}
// container was busy (busy indicates at full capacity), so there is capacity to accept another job request
busyPool.get(sender()).foreach { _ =>
busyPool = busyPool - sender()
}
processBufferOrFeed()
// in case this was a prewarm
prewarmedPool.get(sender()).foreach { data =>
prewarmedPool = prewarmedPool - sender()
}
// in case this was a starting prewarm
prewarmStartingPool.get(sender()).foreach { _ =>
logging.info(this, "failed starting prewarm, removed")
prewarmStartingPool = prewarmStartingPool - sender()
}
//backfill prewarms on every ContainerRemoved(replacePrewarm = true), just in case
if (replacePrewarm) {
adjustPrewarmedContainer(false, false) //in case a prewarm is removed due to health failure or crash
}
// This message is received for one of these reasons:
// 1. Container errored while resuming a warm container, could not process the job, and sent the job back
// 2. The container aged, is destroying itself, and was assigned a job which it had to send back
// 3. The container aged and is destroying itself
// Update the free/busy lists but no message is sent to the feed since there is no change in capacity yet
case RescheduleJob =>
freePool = freePool - sender()
busyPool = busyPool - sender()
case EmitMetrics =>
emitMetrics()
case AdjustPrewarmedContainer =>
adjustPrewarmedContainer(false, true)
}
/** Resend next item in the buffer, or trigger next item in the feed, if no items in the buffer. */
def processBufferOrFeed() = {
// If buffer has more items, and head has not already been resent, send next one, otherwise get next from feed.
runBuffer.dequeueOption match {
case Some((run, _)) => //run the first from buffer
implicit val tid = run.msg.transid
//avoid sending dupes
if (resent.isEmpty) {
logging.info(this, s"re-processing from buffer (${runBuffer.length} items in buffer)")
resent = Some(run)
self ! run
} else {
//do not resend the buffer head multiple times (may reach this point from multiple messages, before the buffer head is re-processed)
}
case None => //feed me!
feed ! MessageFeed.Processed
}
}
/** adjust prewarm containers up to the configured requirements for each kind/memory combination. */
def adjustPrewarmedContainer(init: Boolean, scheduled: Boolean): Unit = {
if (scheduled) {
//on scheduled time, remove expired prewarms
ContainerPool.removeExpired(poolConfig, prewarmConfig, prewarmedPool).foreach { p =>
prewarmedPool = prewarmedPool - p
p ! Remove
}
//on scheduled time, emit cold start counter metric with memory + kind
coldStartCount foreach { coldStart =>
val coldStartKey = coldStart._1
MetricEmitter.emitCounterMetric(
LoggingMarkers.CONTAINER_POOL_PREWARM_COLDSTART(coldStartKey.memory.toString, coldStartKey.kind))
}
}
//fill in missing prewarms (replaces any deletes)
ContainerPool
.increasePrewarms(init, scheduled, coldStartCount, prewarmConfig, prewarmedPool, prewarmStartingPool)
.foreach { c =>
val config = c._1
val currentCount = c._2._1
val desiredCount = c._2._2
if (currentCount < desiredCount) {
(currentCount until desiredCount).foreach { _ =>
prewarmContainer(config.exec, config.memoryLimit, config.reactive.map(_.ttl))
}
}
}
if (scheduled) {
// lastly, clear coldStartCounts each time scheduled event is processed to reset counts
coldStartCount = immutable.Map.empty[ColdStartKey, Int]
}
}
/** Creates a new container and updates state accordingly. */
def createContainer(memoryLimit: ByteSize): (ActorRef, ContainerData) = {
val ref = childFactory(context)
val data = MemoryData(memoryLimit)
freePool = freePool + (ref -> data)
ref -> data
}
/** Creates a new prewarmed container */
def prewarmContainer(exec: CodeExec[_], memoryLimit: ByteSize, ttl: Option[FiniteDuration]): Unit = {
if (hasPoolSpaceFor(busyPool ++ freePool ++ prewarmedPool, prewarmStartingPool, memoryLimit)) {
val newContainer = childFactory(context)
prewarmStartingPool = prewarmStartingPool + (newContainer -> (exec.kind, memoryLimit))
newContainer ! Start(exec, memoryLimit, ttl)
} else {
logging.warn(
this,
s"Cannot create prewarm container due to reach the invoker memory limit: ${poolConfig.userMemory.toMB}")
}
}
  /** This is only for cold start statistics of prewarm configs, i.e. not blackbox or other configs. */
def incrementColdStartCount(kind: String, memoryLimit: ByteSize): Unit = {
prewarmConfig
.filter { config =>
kind == config.exec.kind && memoryLimit == config.memoryLimit
}
.foreach { _ =>
val coldStartKey = ColdStartKey(kind, memoryLimit)
coldStartCount.get(coldStartKey) match {
case Some(value) => coldStartCount = coldStartCount + (coldStartKey -> (value + 1))
case None => coldStartCount = coldStartCount + (coldStartKey -> 1)
}
}
}
/**
* Takes a prewarm container out of the prewarmed pool
* iff a container with a matching kind and memory is found.
*
* @param action the action that holds the kind and the required memory.
* @return the container iff found
*/
def takePrewarmContainer(action: ExecutableWhiskAction): Option[(ActorRef, ContainerData)] = {
val kind = action.exec.kind
val memory = action.limits.memory.megabytes.MB
val now = Deadline.now
prewarmedPool.toSeq
.sortBy(_._2.expires.getOrElse(now))
.find {
case (_, PreWarmedData(_, `kind`, `memory`, _, _)) => true
case _ => false
}
.map {
case (ref, data) =>
// Move the container to the usual pool
freePool = freePool + (ref -> data)
prewarmedPool = prewarmedPool - ref
// Create a new prewarm container
// NOTE: prewarming ignores the action code in exec, but this is dangerous as the field is accessible to the
// factory
//get the appropriate ttl from prewarm configs
val ttl =
prewarmConfig.find(pc => pc.memoryLimit == memory && pc.exec.kind == kind).flatMap(_.reactive.map(_.ttl))
prewarmContainer(action.exec, memory, ttl)
(ref, data)
}
}
/** Removes a container and updates state accordingly. */
def removeContainer(toDelete: ActorRef) = {
toDelete ! Remove
freePool = freePool - toDelete
busyPool = busyPool - toDelete
}
/**
* Calculate if there is enough free memory within a given pool.
*
* @param pool The pool, that has to be checked, if there is enough free memory.
* @param memory The amount of memory to check.
* @return true, if there is enough space for the given amount of memory.
*/
def hasPoolSpaceFor[A](pool: Map[A, ContainerData],
prewarmStartingPool: Map[A, (String, ByteSize)],
memory: ByteSize): Boolean = {
memoryConsumptionOf(pool) + prewarmStartingPool.map(_._2._2.toMB).sum + memory.toMB <= poolConfig.userMemory.toMB
}
/**
* Log metrics about pool state (buffer size, buffer memory requirements, active number, active memory, prewarm number, prewarm memory)
*/
private def emitMetrics() = {
MetricEmitter.emitGaugeMetric(LoggingMarkers.CONTAINER_POOL_RUNBUFFER_COUNT, runBuffer.size)
MetricEmitter.emitGaugeMetric(
LoggingMarkers.CONTAINER_POOL_RUNBUFFER_SIZE,
runBuffer.map(_.action.limits.memory.megabytes).sum)
val containersInUse = freePool.filter(_._2.activeActivationCount > 0) ++ busyPool
MetricEmitter.emitGaugeMetric(LoggingMarkers.CONTAINER_POOL_ACTIVE_COUNT, containersInUse.size)
MetricEmitter.emitGaugeMetric(
LoggingMarkers.CONTAINER_POOL_ACTIVE_SIZE,
containersInUse.map(_._2.memoryLimit.toMB).sum)
MetricEmitter.emitGaugeMetric(
LoggingMarkers.CONTAINER_POOL_PREWARM_COUNT,
prewarmedPool.size + prewarmStartingPool.size)
MetricEmitter.emitGaugeMetric(
LoggingMarkers.CONTAINER_POOL_PREWARM_SIZE,
prewarmedPool.map(_._2.memoryLimit.toMB).sum + prewarmStartingPool.map(_._2._2.toMB).sum)
val unused = freePool.filter(_._2.activeActivationCount == 0)
val unusedMB = unused.map(_._2.memoryLimit.toMB).sum
MetricEmitter.emitGaugeMetric(LoggingMarkers.CONTAINER_POOL_IDLES_COUNT, unused.size)
MetricEmitter.emitGaugeMetric(LoggingMarkers.CONTAINER_POOL_IDLES_SIZE, unusedMB)
}
}
object ContainerPool {
/**
* Calculate the memory of a given pool.
*
* @param pool The pool with the containers.
* @return The memory consumption of all containers in the pool in Megabytes.
*/
protected[containerpool] def memoryConsumptionOf[A](pool: Map[A, ContainerData]): Long = {
pool.map(_._2.memoryLimit.toMB).sum
}
/**
* Finds the best container for a given job to run on.
*
* Selects an arbitrary warm container from the passed pool of idle containers
* that matches the action and the invocation namespace. The implementation uses
* matching such that structural equality of action and the invocation namespace
* is required.
* Returns None iff no matching container is in the idle pool.
* Does not consider pre-warmed containers.
*
* @param action the action to run
* @param invocationNamespace the namespace, that wants to run the action
* @param idles a map of idle containers, awaiting work
* @return a container if one found
*/
protected[containerpool] def schedule[A](action: ExecutableWhiskAction,
invocationNamespace: EntityName,
idles: Map[A, ContainerData]): Option[(A, ContainerData)] = {
idles
.find {
case (_, c @ WarmedData(_, `invocationNamespace`, `action`, _, _, _)) if c.hasCapacity() => true
case _ => false
}
.orElse {
idles.find {
case (_, c @ WarmingData(_, `invocationNamespace`, `action`, _, _)) if c.hasCapacity() => true
case _ => false
}
}
.orElse {
idles.find {
case (_, c @ WarmingColdData(`invocationNamespace`, `action`, _, _)) if c.hasCapacity() => true
case _ => false
}
}
}
/**
* Finds the oldest previously used container to remove to make space for the job passed to run.
* Depending on the space that has to be allocated, several containers might be removed.
*
* NOTE: This method is never called to remove an action that is in the pool already,
* since this would be picked up earlier in the scheduler and the container reused.
*
* @param pool a map of all free containers in the pool
* @param memory the amount of memory that has to be freed up
* @return a list of containers to be removed iff found
*/
@tailrec
protected[containerpool] def remove[A](pool: Map[A, ContainerData],
memory: ByteSize,
toRemove: List[A] = List.empty): List[A] = {
// Try to find a Free container that does NOT have any active activations AND is initialized with any OTHER action
val freeContainers = pool.collect {
// Only warm containers will be removed. Prewarmed containers will stay always.
case (ref, w: WarmedData) if w.activeActivationCount == 0 =>
ref -> w
}
if (memory > 0.B && freeContainers.nonEmpty && memoryConsumptionOf(freeContainers) >= memory.toMB) {
// Remove the oldest container if:
// - there is more memory required
// - there are still containers that can be removed
// - there are enough free containers that can be removed
val (ref, data) = freeContainers.minBy(_._2.lastUsed)
// Catch exception if remaining memory will be negative
val remainingMemory = Try(memory - data.memoryLimit).getOrElse(0.B)
remove(freeContainers - ref, remainingMemory, toRemove ++ List(ref))
} else {
      // If this is the first call: all containers are currently in use, or more memory is needed than can be
      // freed by removing containers.
      // Or, if this is one of the recursions: enough containers have already been found to free the required
      // memory. -> Abort recursion
toRemove
}
}
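  // Worked example (hypothetical sizes, for illustration only): with three idle warm containers
  // of 256 MB each and memory = 512.MB, the first recursion removes the least recently used
  // container (256 MB still needed), the second removes the next oldest, and the third call
  // returns the accumulated list because the remaining requirement has reached 0 MB.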
/**
* Find the expired actor in prewarmedPool
*
* @param poolConfig
* @param prewarmConfig
* @param prewarmedPool
* @param logging
* @return a list of expired actor
*/
def removeExpired[A](poolConfig: ContainerPoolConfig,
prewarmConfig: List[PrewarmingConfig],
prewarmedPool: Map[A, PreWarmedData])(implicit logging: Logging): List[A] = {
val now = Deadline.now
val expireds = prewarmConfig
.flatMap { config =>
val kind = config.exec.kind
val memory = config.memoryLimit
config.reactive
.map { c =>
val expiredPrewarmedContainer = prewarmedPool.toSeq
.filter { warmInfo =>
warmInfo match {
case (_, p @ PreWarmedData(_, `kind`, `memory`, _, _)) if p.isExpired() => true
case _ => false
}
}
.sortBy(_._2.expires.getOrElse(now))
if (expiredPrewarmedContainer.nonEmpty) {
// emit expired container counter metric with memory + kind
MetricEmitter.emitCounterMetric(LoggingMarkers.CONTAINER_POOL_PREWARM_EXPIRED(memory.toString, kind))
logging.info(
this,
s"[kind: ${kind} memory: ${memory.toString}] ${expiredPrewarmedContainer.size} expired prewarmed containers")
}
expiredPrewarmedContainer.map(e => (e._1, e._2.expires.getOrElse(now)))
}
.getOrElse(List.empty)
}
.sortBy(_._2) //need to sort these so that if the results are limited, we take the oldest
.map(_._1)
if (expireds.nonEmpty) {
logging.info(this, s"removing up to ${poolConfig.prewarmExpirationLimit} of ${expireds.size} expired containers")
expireds.take(poolConfig.prewarmExpirationLimit).foreach { e =>
prewarmedPool.get(e).map { d =>
logging.info(this, s"removing expired prewarm of kind ${d.kind} with container ${d.container} ")
}
}
}
expireds.take(poolConfig.prewarmExpirationLimit)
}
/**
* Find the increased number for the prewarmed kind
*
* @param init
* @param scheduled
* @param coldStartCount
* @param prewarmConfig
* @param prewarmedPool
* @param prewarmStartingPool
* @param logging
* @return the current number and increased number for the kind in the Map
*/
def increasePrewarms(init: Boolean,
scheduled: Boolean,
coldStartCount: Map[ColdStartKey, Int],
prewarmConfig: List[PrewarmingConfig],
prewarmedPool: Map[ActorRef, PreWarmedData],
prewarmStartingPool: Map[ActorRef, (String, ByteSize)])(
implicit logging: Logging): Map[PrewarmingConfig, (Int, Int)] = {
prewarmConfig.map { config =>
val kind = config.exec.kind
val memory = config.memoryLimit
val runningCount = prewarmedPool.count {
// done starting (include expired, since they may not have been removed yet)
case (_, p @ PreWarmedData(_, `kind`, `memory`, _, _)) => true
// started but not finished starting (or expired)
case _ => false
}
val startingCount = prewarmStartingPool.count(p => p._2._1 == kind && p._2._2 == memory)
val currentCount = runningCount + startingCount
// determine how many are needed
val desiredCount: Int =
if (init) config.initialCount
else {
if (scheduled) {
// scheduled/reactive config backfill
config.reactive
.map(c => getReactiveCold(coldStartCount, c, kind, memory).getOrElse(c.minCount)) //reactive -> desired is either cold start driven, or minCount
.getOrElse(config.initialCount) //not reactive -> desired is always initial count
} else {
// normal backfill after removal - make sure at least minCount or initialCount is started
config.reactive.map(_.minCount).getOrElse(config.initialCount)
}
}
if (currentCount < desiredCount) {
logging.info(
this,
s"found ${currentCount} started and ${startingCount} starting; ${if (init) "initing" else "backfilling"} ${desiredCount - currentCount} pre-warms to desired count: ${desiredCount} for kind:${config.exec.kind} mem:${config.memoryLimit.toString}")(
TransactionId.invokerWarmup)
}
(config, (currentCount, desiredCount))
}.toMap
}
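  // A worked sketch of the desired-count logic above, assuming a reactive config with
  // minCount = 1 and an initialCount of 2:
  //   init == true                    -> desired = initialCount (2)
  //   scheduled == true               -> desired = cold-start driven (getReactiveCold) or minCount (1)
  //   otherwise (backfill after kill) -> desired = minCount (1); with no reactive config, initialCount (2)
  // The returned map pairs each config with (currentCount, desiredCount).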
/**
   * Get the required number of prewarmed containers according to the cold starts that happened in the previous minute
*
* @param coldStartCount
* @param config
* @param kind
* @param memory
* @return the required prewarmed container number
*/
def getReactiveCold(coldStartCount: Map[ColdStartKey, Int],
config: ReactivePrewarmingConfig,
kind: String,
memory: ByteSize): Option[Int] = {
coldStartCount.get(ColdStartKey(kind, memory)).map { value =>
// Let's assume that threshold is `2`, increment is `1` in runtimes.json
// if cold start number in previous minute is `2`, requireCount is `2/2 * 1 = 1`
// if cold start number in previous minute is `4`, requireCount is `4/2 * 1 = 2`
math.min(math.max(config.minCount, (value / config.threshold) * config.increment), config.maxCount)
}
}
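  // A worked example of the formula above, assuming threshold = 2, increment = 1,
  // minCount = 1 and maxCount = 4 in the reactive config:
  //   5 cold starts in the last minute -> (5 / 2) * 1 = 2 -> clamped to [1, 4] -> Some(2)
  //   no cold starts recorded          -> key missing in coldStartCount        -> None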
def props(factory: ActorRefFactory => ActorRef,
poolConfig: ContainerPoolConfig,
feed: ActorRef,
prewarmConfig: List[PrewarmingConfig] = List.empty)(implicit logging: Logging) =
Props(new ContainerPool(factory, feed, prewarmConfig, poolConfig))
}
/** Contains settings needed to perform container prewarming. */
case class PrewarmingConfig(initialCount: Int,
exec: CodeExec[_],
memoryLimit: ByteSize,
reactive: Option[ReactivePrewarmingConfig] = None)
| style95/openwhisk | core/invoker/src/main/scala/org/apache/openwhisk/core/containerpool/ContainerPool.scala | Scala | apache-2.0 | 32,684 |
package org.dama.datasynth.lang
import net.liftweb.json._
import org.dama.datasynth.LoadPropertyTables
import org.dama.datasynth.executionplan.ExecutionPlan._
import org.dama.datasynth.schema.Schema
import scala.reflect.runtime.universe._
import scala.collection.mutable
/**
* Created by joangui on 13/04/2017.
  * ReadExecutionPlan is used to read a JSON document and create the schema of the graph to be generated.
  * It also allows creating the necessary Tables, as ExecutionPlan nodes, that are required by the
* runtime to generate the graph.
*/
object ReadExecutionPlan {
//var schema:Schema = null
/**
* Load a schema
    * @param json String containing the definition of the schema in JSON format.
* @return Schema
*/
def loadSchema(json : String): Schema ={
implicit val formats = DefaultFormats
val jsonT = parse(json)
val schema:Schema = jsonT.extract[Schema]
schema
}
/**
    * Given a schema, return the set of Tables necessary to create it.
    * @param schema the schema to be created
* @return the sequence of necessary Tables to create the graph.
*/
def createExecutionPlan(schema: Schema):Seq[Table]=
{
val propertyTablesNodes:Seq[PropertyTable[_]] = LoadPropertyTables.getPropertyTableNodes(schema.nodeTypes)
val edgeTablesNodes:Seq[Table] = LoadStructuralTables.getStructuralTables(schema,propertyTablesNodes)
propertyTablesNodes++edgeTablesNodes
}
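  // A minimal usage sketch, assuming the JSON document matches the fields of `Schema`:
  //   val json   = scala.io.Source.fromFile("schema.json").mkString
  //   val schema = ReadExecutionPlan.loadSchema(json)
  //   val tables = ReadExecutionPlan.createExecutionPlan(schema)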
/**
*
* @param initParameters A sequence in the form {value:dataType}
* @return a sequence of values
*/
def readInitParameters(initParameters: Seq[String]):Seq[Value[_]] = {
initParameters.map(initParameter=> {
val colonPosition = initParameter.lastIndexOf(":")
val dataType = initParameter.substring(colonPosition+1).toLowerCase
val value = initParameter.substring(0,colonPosition);
dataType match {
case "string" => StaticValue[String](value)
case "int" => StaticValue[Int](value.toInt)
case "double" => StaticValue[Double](value.toDouble)
case "long" => StaticValue[Long](value.toLong)
case "float" => StaticValue[Float](value.toFloat)
case "file" => File(value)
}
}
)
}
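  // Example: each entry is "<value>:<dataType>", split on the last ':' so values may
  // themselves contain colons.
  //   readInitParameters(Seq("42:int", "3.14:double", "data/nodes.csv:file"))
  //   // -> Seq(StaticValue[Int](42), StaticValue[Double](3.14), File("data/nodes.csv"))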
}
| DAMA-UPC/DataSynth | src/main/scala/org/dama/datasynth/lang/ReadExecutionPlan.scala | Scala | gpl-3.0 | 2,211 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.hive.test.TestHiveContext
import org.scalatest.BeforeAndAfterAll
class ConcurrentHiveSuite extends SparkFunSuite with BeforeAndAfterAll {
ignore("multiple instances not supported") {
test("Multiple Hive Instances") {
(1 to 10).map { i =>
val ts =
new TestHiveContext(new SparkContext("local", s"TestSQLContext$i", new SparkConf()))
ts.executeSql("SHOW TABLES").toRdd.collect()
ts.executeSql("SELECT * FROM src").toRdd.collect()
ts.executeSql("SHOW TABLES").toRdd.collect()
}
}
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/ConcurrentHiveSuite.scala | Scala | apache-2.0 | 1,488 |
package org.rz.akkagraphite.actors
import java.net.InetSocketAddress
import akka.actor.{ActorLogging, ActorRef, ActorSystem, FSM, Props, RootActorPath}
import akka.cluster.ClusterEvent.{CurrentClusterState, MemberUp}
import akka.cluster.{Cluster, Member, MemberStatus}
import akka.io.{IO, Tcp}
import akka.util.ByteString
import com.typesafe.config.{Config, ConfigValueFactory}
import org.rz.akkagraphite.actors.GraphiteRelayer._
import org.rz.akkagraphite.actors.GraphiteRelayerFrontend.RelayerRegistration
import org.rz.akkagraphite.model.data.GraphiteData
/**
* Relay to Graphite actor.
*
* @author rozasdev (rozasdev at gmail.com)
*/
class GraphiteRelayer(remoteSocket: InetSocketAddress) extends FSM[GraphiteRelayer.State, GraphiteData] with ActorLogging {
import Tcp._
import context.system
val manager = IO(Tcp)
var managerWasShutDown: Boolean = false
var socket: ActorRef = _
val cluster = Cluster(context.system)
// Initial state
startWith(Disconnected, EmptyData)
/**
* Define actions taken when receiving commands at a given state via FSM DSL.
*/
when(GraphiteRelayer.Disconnected) {
case Event(Send(_), _) =>
log.error("[GRAPHITE_RELAYER] Cannot send a tuple to Graphite when TCP connection is down")
stay using EmptyData
case Event(Connected(remote, local), _) =>
socket = sender()
// We won't be handling any incoming TCP message ...
socket ! Register(system.actorOf(Props.empty))
goto(GraphiteRelayer.Connected) using EmptyData
case Event(CommandFailed(_), _) =>
val errorMsg = s"[GRAPHITE_RELAYER] Cannot connect via TCP to (${remoteSocket.getHostName}:${remoteSocket.getPort})"
log.error(errorMsg)
stop(FSM.Failure(errorMsg))
case Event(GraphiteRelayer.Connect, _) =>
manager ! Connect(remoteSocket)
stay using EmptyData
case Event(state: CurrentClusterState, _) =>
state.members.filter(_.status == MemberStatus.Up) foreach register
stay using EmptyData
case Event(MemberUp(member), _) =>
log.debug(s"Member is up $member")
register(member)
stay using EmptyData
}
when(GraphiteRelayer.Connected) {
case Event(Send(data), _) =>
val message: ByteString = ByteString(data.toString)
log.debug(s"[GRAPHITE_RELAYER] Sending '${message.utf8String.replace("\n", "")}'")
socket ! Write(message)
stay using data
case Event(CommandFailed(w: Write), _) =>
log.error("[GRAPHITE_RELAYER] Error writing to TCP socket")
stay using EmptyData
case Event(ErrorClosed, _) =>
val errorMsg = s"[GRAPHITE_RELAYER] TCP connection was lost (${remoteSocket.getHostName}:${remoteSocket.getPort})"
log.error(errorMsg)
stop(FSM.Failure(errorMsg))
goto(GraphiteRelayer.Disconnected) using EmptyData
case Event(GraphiteRelayer.Disconnect, _) =>
managerWasShutDown = true
manager ! Close
goto(GraphiteRelayer.Disconnected) using EmptyData
case Event(state: CurrentClusterState, _) =>
state.members.filter(_.status == MemberStatus.Up) foreach register
stay
case default =>
log.warning(s"[GRAPHITE_RELAYER] Unknown event $default")
stay using EmptyData
}
/**
* Suscribe to the cluster.
*/
override def preStart(): Unit = cluster.subscribe(self, classOf[MemberUp])
/**
* Send termination message to TCP manager and leaves the cluster.
*/
override def postStop(): Unit = {
if (!managerWasShutDown) {
log.warning("[GRAPHITE_RELAYER] TCP Manager was not shut down with a Disconnect command, shutting down now...")
manager ! Close
}
cluster.unsubscribe(self)
}
private def register(member: Member): Unit =
if (member.hasRole(Constants.ROLE_FRONTEND)) {
val frontendPath = RootActorPath(member.address) / Constants.ACTOR_PATH_USER / GraphiteRelayerFrontend.getName
context.actorSelection(frontendPath) ! RelayerRegistration
}
}
/**
* Companion object for Graphite Relayer actor.
*/
object GraphiteRelayer {
/**
* Commands that the relayer understands.
*/
sealed trait Command
case object Connect extends Command
case object Disconnect extends Command
case class Send(data: GraphiteData) extends Command
/**
* Actor state: connected to Carbon or disconnected.
*/
sealed trait State
case object Connected extends State
case object Disconnected extends State
/**
* Actor's internal state definition.
*/
case object EmptyData extends GraphiteData
/**
* Actor props.
*/
def props(remote: InetSocketAddress) = Props(classOf[GraphiteRelayer], remote)
/**
* Initiates the backend node system.
*/
def initiate(backendName: String, config: Config, port: Int, socket: InetSocketAddress): ActorRef = {
val backendConfig = config.getConfig(Constants.ROLE_BACKEND).withValue(Constants.CONFIG_NETTY_TCP_PORT, ConfigValueFactory.fromAnyRef(port))
val system = ActorSystem(config.getString(Constants.CONFIG_CLUSTER_NAME), backendConfig)
system.actorOf(GraphiteRelayer.props(socket), backendName)
}
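  // A minimal usage sketch, assuming `config`, `socket` and `data` are values built elsewhere:
  //   val relayer = GraphiteRelayer.initiate("graphite-relayer", config, 2552, socket)
  //   relayer ! GraphiteRelayer.Connect            // must be connected before sending
  //   relayer ! GraphiteRelayer.Send(data)         // data: GraphiteData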
}
| rozasdev/AkkaGraphite | src/main/scala/org/rz/akkagraphite/actors/GraphiteRelayer.scala | Scala | apache-2.0 | 5,145 |
package concurrent_programming.monitors.semaphores
import io.threadcso.locks.Lock
import io.threadcso.semaphore.{BooleanSemaphore, Flag}
class FCFSLock extends Lock{
private var locked = false
private val mutex = new BooleanSemaphore(available = true)
private val queue = scala.collection.mutable.Queue[Flag]()
override def lock(): Unit = {
mutex.acquire()
if (!locked){
locked = true
mutex.release()
}
else{
val gate = new Flag()
queue.enqueue(gate)
mutex.release()
gate.acquire()
locked = true
}
}
override def unlock(): Unit = {
mutex.acquire()
if (queue.isEmpty){
locked = false
}
else{
val gate = queue.dequeue()
gate.release()
}
mutex.release()
}
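  // A minimal usage sketch: waiters acquire the lock in FIFO (first-come-first-served) order.
  //   val lock = new FCFSLock
  //   lock.lock()
  //   try { /* critical section */ } finally { lock.unlock() }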
}
| AlexandruValeanu/Concurrent-Programming-in-Scala | src/concurrent_programming/monitors/semaphores/FCFSLock.scala | Scala | gpl-3.0 | 780 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.datascience.montecarlorisk
import org.apache.spark.rdd.RDD
import org.apache.commons.math3.util.FastMath
object KernelDensity {
/**
   * Given a set of samples from a distribution, estimates its density at the set of given points.
* Uses a Gaussian kernel with the given standard deviation.
*/
def estimate(samples: RDD[Double], standardDeviation: Double,
evaluationPoints: Array[Double]): Array[Double] = {
val logStandardDeviationPlusHalfLog2Pi =
FastMath.log(standardDeviation) + 0.5 * FastMath.log(2 * FastMath.PI)
val (points, count) = samples.aggregate((new Array[Double](evaluationPoints.length), 0))(
(x, y) => {
var i = 0
while (i < evaluationPoints.length) {
x._1(i) += normPdf(y, standardDeviation, logStandardDeviationPlusHalfLog2Pi,
evaluationPoints(i))
i += 1
}
        (x._1, x._2 + 1) // count this sample so that dividing by count below yields a proper mean
},
(x, y) => {
var i = 0
while (i < evaluationPoints.length) {
x._1(i) += y._1(i)
i += 1
}
(x._1, x._2 + y._2)
})
var i = 0
while (i < points.length) {
points(i) /= count
i += 1
}
points
}
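  // A minimal usage sketch, assuming an existing SparkContext `sc`:
  //   val samples = sc.parallelize(Seq(-1.0, 0.0, 1.0))
  //   val density = KernelDensity.estimate(samples, standardDeviation = 1.0,
  //     evaluationPoints = Array(-2.0, 0.0, 2.0))   // one density value per evaluation point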
private def normPdf(mean: Double, standardDeviation: Double,
logStandardDeviationPlusHalfLog2Pi: Double, x: Double): Double = {
val x0 = x - mean
val x1 = x0 / standardDeviation
FastMath.exp(-0.5 * x1 * x1 - logStandardDeviationPlusHalfLog2Pi)
}
}
| chayapan/montecarlorisk | src/main/scala/com/cloudera/datascience/montecarlorisk/KernelDensity.scala | Scala | apache-2.0 | 2,298 |
package com.thangiee.lolhangouts.ui.profile
import android.content.Context
import android.support.v4.view.ViewPager.SimpleOnPageChangeListener
import android.support.v4.view.{PagerAdapter, ViewPager}
import android.view._
import android.widget.FrameLayout
import com.afollestad.materialdialogs.MaterialDialog
import com.thangiee.lolhangouts.R
import com.thangiee.lolhangouts.data.usecases.{ManageFriendUseCaseImpl, GetFriendsUseCaseImpl, GetUserUseCaseImpl}
import com.thangiee.lolhangouts.ui.core.Container
import com.thangiee.lolhangouts.ui.utils._
import it.neokree.materialtabs.{MaterialTab, MaterialTabHost, MaterialTabListener}
import scala.concurrent.ExecutionContext.Implicits.global
class ProfileContainer(name: String, regionId: String)(implicit ctx: Context) extends FrameLayout(ctx) with Container with MaterialTabListener {
private lazy val tabs = this.find[MaterialTabHost](R.id.tabs)
private lazy val pager = this.find[ViewPager](R.id.pager)
private lazy val profileSummaryView = this.find[ProfileSummaryView](R.id.page_1)
private lazy val profileTopChampView = this.find[ProfileTopChampsView](R.id.page_2)
private lazy val profileMatchHistView = this.find[ProfileMatchHistView](R.id.page_3)
case class Page(title: String, var isSet: Boolean = false)
private val loadUser = GetUserUseCaseImpl().loadUser()
private val pages = List(Page("Summary"), Page("Champions"), Page("History"))
private var pagePosition = 0
override def onAttachedToWindow(): Unit = {
super.onAttachedToWindow()
addView(layoutInflater.inflate(R.layout.view_profile_screen, this, false))
val pageChangeListener = new SimpleOnPageChangeListener() {
override def onPageSelected(position: Int): Unit = {
        // when the user swipes, the selected tab changes
tabs.setSelectedNavigationItem(position)
handleSwitchPage(position)
}
}
val pagerAdapter = new ViewPagerAdapter()
pager.setOffscreenPageLimit(3)
pager.setAdapter(pagerAdapter)
pager.setOnPageChangeListener(pageChangeListener)
pages.indices.foreach { i =>
tabs.addTab(tabs.newTab()
.setText(pages(i).title)
.setTabListener(this))
}
delay(150) { // make sure view is attached first
pageChangeListener.onPageSelected(0)
}
}
override def onCreateOptionsMenu(menuInflater: MenuInflater, menu: Menu): Boolean = {
menuInflater.inflate(R.menu.overflow, menu)
    // determine whether or not to inflate an add-friend menu button
GetFriendsUseCaseImpl().loadFriendByName(name) onSuccess { case Bad(_) => // not in friend list
loadUser onSuccess { case Good(user) =>
// don't inflate if viewing your own profile or a profile from a different region
if (name.toLowerCase != user.inGameName.toLowerCase && regionId.toLowerCase == user.region.id.toLowerCase)
runOnUiThread(menuInflater.inflate(R.menu.add_friend, menu))
}
}
if (pagePosition == 1) { // champions tab
menuInflater.inflate(R.menu.info, menu)
}
true
}
override def onOptionsItemSelected(item: MenuItem): Boolean = {
item.getItemId match {
case R.id.menu_add_friend =>
ManageFriendUseCaseImpl().sendFriendRequest(name)
// todo: show message
true
case R.id.menu_info =>
if (pagePosition == 1) {
new MaterialDialog.Builder(ctx)
.title("Top Ranked Champion")
.customView(R.layout.info_top_champs, true)
.positiveText(android.R.string.ok)
.show()
}
true
case _ => false
}
}
private def handleSwitchPage(position: Int): Unit = {
    // stop onCreateOptionsMenu from being called twice after initialization
if (position != pagePosition) {
pagePosition = position
invalidateOptionsMenu()
}
// only load the page the user is currently viewing and initialize it only once
if (!pages(position).isSet) {
position match {
case 0 => profileSummaryView.setProfile(name, regionId)
case 1 => profileTopChampView.setProfile(name, regionId)
case 2 => profileMatchHistView.setProfile(name, regionId)
}
pages(position).isSet = true
}
}
override def getView: View = this
override def onTabSelected(tab: MaterialTab): Unit = pager.setCurrentItem(tab.getPosition)
override def onTabReselected(materialTab: MaterialTab): Unit = {}
override def onTabUnselected(materialTab: MaterialTab): Unit = {}
class ViewPagerAdapter extends PagerAdapter {
override def instantiateItem(container: ViewGroup, position: Int): AnyRef = {
position match {
case 0 => profileSummaryView
case 1 => profileTopChampView
case 2 => profileMatchHistView
}
}
override def destroyItem(container: ViewGroup, position: Int, `object`: scala.Any): Unit = {}
override def getCount: Int = pages.size
override def isViewFromObject(view: View, o: scala.Any): Boolean = {
view == o.asInstanceOf[View]
}
}
}
| Thangiee/LoL-Hangouts | src/com/thangiee/lolhangouts/ui/profile/ProfileContainer.scala | Scala | apache-2.0 | 5,091 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package dotty.tools
package dotc
import core.Contexts.Context
import reporting.Reporter
/* To do:
* - simplify hk types
* - have a second look at normalization (leave at method types if pt is method type?)
* - Don't open package objects from class files if they are present in source
* - Revise the way classes are inherited - when not followed by [...] or (...),
* assume the unparameterized type and forward type parameters as we do now for the synthetic head class.
*/
object Bench extends Driver {
def resident(compiler: Compiler): Reporter = unsupported("resident") /*loop { line =>
val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
compiler.reporter.reset()
new compiler.Run() compile command.files
}*/
private var numRuns = 1
def newCompiler(): Compiler = new Compiler
private def ntimes(n: Int)(op: => Reporter): Reporter =
(emptyReporter /: (0 until n)) ((_, _) => op)
override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter =
if (new config.Settings.Setting.SettingDecorator[Boolean](ctx.base.settings.resident).value(ctx))
resident(compiler)
else
ntimes(numRuns) {
val start = System.nanoTime()
val r = super.doCompile(compiler, fileNames)
println(s"time elapsed: ${(System.nanoTime - start) / 1000000}ms")
r
}
def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = {
val pos = args indexOf name
if (pos < 0) (default, args)
else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2)))
}
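  // Worked example of the helper above:
  //   extractNumArg(Array("#runs", "5", "hello.scala"), "#runs")
  //   // -> (5, Array("hello.scala"))   the flag and its value are stripped from the args
  //   extractNumArg(Array("hello.scala"), "#runs")
  //   // -> (1, Array("hello.scala"))   the default is used when the flag is absent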
override def process(args: Array[String]): Reporter = {
val (numCompilers, args1) = extractNumArg(args, "#compilers")
val (numRuns, args2) = extractNumArg(args1, "#runs")
this.numRuns = numRuns
ntimes(numCompilers)(super.process(args2))
}
}
| magarciaEPFL/dotty | src/dotty/tools/dotc/Bench.scala | Scala | bsd-3-clause | 2,013 |
package agilesitesng.wem.model
//package com.github.marklister.base64
import scala.collection.immutable.HashMap
/**
* Base64 encoder
* @author Mark Lister
* This software is distributed under the 2-Clause BSD license. See the
* LICENSE file in the root of the repository.
*
* Copyright (c) 2014 - 2015 Mark Lister
*
* The repo for this Base64 encoder lives at https://github.com/marklister/base64
* Please send your issues, suggestions and pull requests there.
*
*/
object Base64 {
private[this] val zero = Array(0, 0).map(_.toByte)
class B64Scheme(val encodeTable: IndexedSeq[Char]) {
lazy val decodeTable = HashMap(encodeTable.zipWithIndex: _ *)
}
lazy val base64 = new B64Scheme(('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ Seq('+', '/'))
lazy val base64Url = new B64Scheme(base64.encodeTable.dropRight(2) ++ Seq('-', '_'))
implicit class Encoder(b: Array[Byte]) {
lazy val pad = (3 - b.length % 3) % 3
def toBase64(implicit scheme: B64Scheme = base64): String = {
def sixBits(x: Array[Byte]): Array[Int] = {
val a = (x(0) & 0xfc) >> 2
val b = ((x(0) & 0x3) << 4) | ((x(1) & 0xf0) >> 4)
val c = ((x(1) & 0xf) << 2) | ((x(2) & 0xc0) >> 6)
val d = (x(2)) & 0x3f
Array(a, b, c, d)
}
((b ++ zero.take(pad)).grouped(3)
.flatMap(sixBits)
.map(scheme.encodeTable)
.toArray
.dropRight(pad) :+ "=" * pad)
.mkString
}
}
implicit class Decoder(s: String) {
lazy val cleanS = s.reverse.dropWhile(_ == '=').reverse
lazy val pad = s.length - cleanS.length
def toByteArray(implicit scheme: B64Scheme = base64): Array[Byte] = {
def threeBytes(s: String): Array[Byte] = {
val r = s.map(scheme.decodeTable(_)).foldLeft(0)((a, b) => (a << 6) | b)
Array((r >> 16).toByte, (r >> 8).toByte, r.toByte)
}
if (pad > 2 || s.length % 4 != 0) throw new java.lang.IllegalArgumentException("Invalid Base64 String:" + s)
try {
(cleanS + "A" * pad)
.grouped(4)
.map(threeBytes)
.flatten
.toArray
.dropRight(pad)
} catch {case e:NoSuchElementException => throw new java.lang.IllegalArgumentException("Invalid Base64 String:" + s) }
}
}
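  // A minimal usage sketch, relying on the default `base64` scheme:
  //   import Base64._
  //   "Man".getBytes("UTF-8").toBase64              // "TWFu"
  //   "Ma".getBytes("UTF-8").toBase64               // "TWE="  (one character of '=' padding)
  //   new String("TWFu".toByteArray, "UTF-8")       // "Man"
  //   Array[Byte](1, 2, 3).toBase64(base64Url)      // url-safe alphabet: '-' and '_' instead of '+' and '/'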
} | agilesites/agilesites2-build | src/main/scala/agilesitesng/wem/model/Base64.scala | Scala | mit | 2,301 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{Activity, AbstractModule}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
 * Identity just returns the input as the output.
 * It's useful in a parallel container to get the original input.
*/
@SerialVersionUID(- 8429221694319933625L)
class Identity[T: ClassTag]()
(implicit ev: TensorNumeric[T]) extends AbstractModule[Activity, Activity, T] {
override def updateOutput(input: Activity): Activity = {
output = input
output
}
override def updateGradInput(input: Activity,
gradOutput: Activity): Activity = {
gradInput = gradOutput
gradInput
}
}
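// A minimal usage sketch, assuming `input` is an Activity (e.g. a Tensor) built elsewhere:
//   val layer  = Identity[Float]()
//   val output = layer.forward(input)   // forward simply hands the input back unchanged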
object Identity {
def apply[@specialized(Float, Double) T: ClassTag]()
(implicit ev: TensorNumeric[T]) : Identity[T] = {
new Identity[T]()
}
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Identity.scala | Scala | apache-2.0 | 1,480 |
package com.typesafe.sbt
package packager
import sbt._
/**
* General purpose keys for the native packager
*/
trait NativePackagerKeys {
val packageName = SettingKey[String]("packageName", "Name of the created output package. Used for dirs/scripts.")
val packageSummary = SettingKey[String]("package-summary", "Summary of the contents of a linux package.")
val packageDescription =
SettingKey[String]("package-description", "The description of the package. Used when searching.")
val maintainer = SettingKey[String]("maintainer", "The name/email address of a maintainer for the native package.")
val executableScriptName =
SettingKey[String]("executableScriptName", "Name of the executing script.")
val maintainerScripts = TaskKey[Map[String, Seq[String]]]("maintainerScripts", "Scriptname to content lines")
}
/**
* This Keys object can be used for
* <ul>
* <li>non autoplugin builds</li>
* <li>import single keys, which are not inside the autoImport</li>
* </ul>
*
* == Non autoplugin builds ==
*
* {{{
* import com.typesafe.sbt.packager.Keys._
*
* packageName := ""
* }}}
*
* == autoplugin builds ==
*
* {{{
* NativePackagerKeys.packageName := ""
* }}}
*/
object Keys
extends NativePackagerKeys
with universal.UniversalKeys
with linux.LinuxKeys
with windows.WindowsKeys
with docker.DockerKeys
with debian.DebianKeys
with rpm.RpmKeys
with archetypes.JavaAppKeys
with archetypes.JavaServerAppKeys
with archetypes.systemloader.SystemloaderKeys
| kodemaniak/sbt-native-packager | src/main/scala/com/typesafe/sbt/packager/Keys.scala | Scala | bsd-2-clause | 1,558 |
package scala_pastebin
import scala.reflect.runtime.universe._
import scala.reflect.runtime.currentMirror
/**
* @author humberto
* http://stackoverflow.com/questions/12797300/in-scala-how-to-turn-objects-values-into-mapstring-string
*/
object ObjectToMap {
def apply(obj: AnyRef) = {
val r = currentMirror.reflect(obj)
r.symbol.typeSignature.members.toStream
.collect { case s: TermSymbol if !s.isMethod => r.reflectField(s) }
.map(r => r.symbol.name.toString.trim -> r.get.toString)
.toMap
}
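  // Example: field values are rendered with toString.
  //   case class Person(name: String, age: Int)
  //   ObjectToMap(Person("Ada", 36))   // -> Map("name" -> "Ada", "age" -> "36")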
} | humbhenri/scala_pastebin | src/main/scala/scala_pastebin/ObjectToMap.scala | Scala | unlicense | 529 |
package scwebapp
import java.nio.charset.Charset
import scutil.core.implicits.*
import scutil.lang.*
import scutil.codec.*
import scwebapp.factory.mimeType
import scwebapp.header.*
import scwebapp.data.*
import scwebapp.format.*
trait HttpRequest {
//------------------------------------------------------------------------------
//## metadata
def remoteUser:Option[String]
def remoteIp:String
def remotePort:Int
def localIp:String
def localPort:Int
def secure:Boolean
def method:Either[String,HttpMethod]
// requestURI always contains contextPath, servletPath and pathInfo but is still URL-encoded
// ROOT context contextPath is empty
// mapping /foo/* servletPath is "/foo", pathInfo contains the rest
// mapping /* servletPath is empty, pathInfo contains the rest
// *.foo mapping servletPath contains everything below the context, pathInfo is null
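	// worked example: for a webapp deployed at context "/shop" with a servlet mapped to /api/*,
	// a request to /shop/api/items%2Fall yields
	//   contextPath = "/shop", servletPath = "/api", pathInfoRaw = "/items%2Fall"
	//   pathInfoUTF8 = Right("/items/all")   (URL-decoding only happens in the helpers below)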
/** full path including the contextPath */
def uri:String
/*
// decoded according to server settings which by default (in tomcat) is ISO-8859-1.
// this is not influenced by setCharacterEncoding or setEncoding
def fullPathServlet:String =
Seq(peer.getServletPath, peer.getPathInfo) filter { _ != null } mkString ""
def pathInfoServlet:Option[String] =
Option(peer.getPathInfo)
*/
/** context of the web app */
def contextPath:String
def servletPath:String
/** the full path after the context path, not yet url-decoded */
final def fullPathRaw:String =
uri cutPrefix contextPath getOrError show"expected uri ${uri} to start with context path ${contextPath}"
final def pathInfoRaw:String =
fullPathRaw cutPrefix servletPath getOrError show"expected uri ${uri} to start with context path ${contextPath} and servlet path ${servletPath}"
final def fullPath(encoding:Charset):Either[URIComponentProblem,String] =
URIComponent forCharset encoding decode fullPathRaw
final def pathInfo(encoding:Charset):Either[URIComponentProblem,String] =
URIComponent forCharset encoding decode pathInfoRaw
final def fullPathUTF8:Either[URIComponentProblem,String] =
fullPath(Charsets.utf_8)
final def pathInfoUTF8:Either[URIComponentProblem,String] =
pathInfo(Charsets.utf_8)
def queryString:Option[String]
final def queryParameters(encoding:Charset):Either[String,CaseParameters] =
queryString.cata(Right(CaseParameters.empty), UrlEncoding.parseQueryParameters(_, encoding))
final def queryParametersUTF8:Either[String,CaseParameters] =
queryParameters(Charsets.utf_8)
//------------------------------------------------------------------------------
//## headers
def headers:HttpHeaders
//------------------------------------------------------------------------------
//## content
final def formParameters(defaultEncoding:Charset):Either[String,CaseParameters] =
for {
contentType <- (headers first ContentType):Either[String,Option[ContentType]]
mime <- contentType map { _.typ } toRight show"missing content type"
_ <- mime sameMajorAndMinor mimeType.application_form guardEither show"unexpected content type ${mime.value}"
encodingOpt <- mime.charset
string = body readString Charsets.us_ascii
encoding = encodingOpt getOrElse defaultEncoding
params <- UrlEncoding.parseForm(string, encoding)
}
yield params
final def formParametersUTF8:Either[String,CaseParameters] =
formParameters(Charsets.utf_8)
def parameters:CaseParameters
def body:HttpInput
def parts:Either[HttpPartsProblem,Seq[HttpPart]]
}
| ritschwumm/scwebapp | modules/core/src/main/scala/scwebapp/HttpRequest.scala | Scala | bsd-2-clause | 3,476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{CodeAndComment, CodeFormatter, CodeGenerator, UnsafeRowWriter}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
* An Iterator to walk through the InternalRows from a CachedBatch
*/
abstract class ColumnarIterator extends Iterator[InternalRow] {
def initialize(input: Iterator[DefaultCachedBatch], columnTypes: Array[DataType],
columnIndexes: Array[Int]): Unit
}
/**
 * A helper class to update the fields of UnsafeRow, used by ColumnAccessor.
 *
 * WARNING: These setters MUST be called in increasing order of ordinals.
*/
class MutableUnsafeRow(val writer: UnsafeRowWriter) extends BaseGenericInternalRow {
override def isNullAt(i: Int): Boolean = writer.isNullAt(i)
override def setNullAt(i: Int): Unit = writer.setNullAt(i)
override def setBoolean(i: Int, v: Boolean): Unit = writer.write(i, v)
override def setByte(i: Int, v: Byte): Unit = writer.write(i, v)
override def setShort(i: Int, v: Short): Unit = writer.write(i, v)
override def setInt(i: Int, v: Int): Unit = writer.write(i, v)
override def setLong(i: Int, v: Long): Unit = writer.write(i, v)
override def setFloat(i: Int, v: Float): Unit = writer.write(i, v)
override def setDouble(i: Int, v: Double): Unit = writer.write(i, v)
// the writer will be used directly to avoid creating wrapper objects
override def setDecimal(i: Int, v: Decimal, precision: Int): Unit =
throw new UnsupportedOperationException
override def setInterval(i: Int, value: CalendarInterval): Unit =
throw new UnsupportedOperationException
override def update(i: Int, v: Any): Unit = throw new UnsupportedOperationException
  // all other methods inherited from GenericMutableRow are not needed
override protected def genericGet(ordinal: Int): Any = throw new UnsupportedOperationException
override def numFields: Int = throw new UnsupportedOperationException
}
/**
* Generates bytecode for a [[ColumnarIterator]] for columnar cache.
*/
object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarIterator] with Logging {
protected def canonicalize(in: Seq[DataType]): Seq[DataType] = in
protected def bind(in: Seq[DataType], inputSchema: Seq[Attribute]): Seq[DataType] = in
protected def create(columnTypes: Seq[DataType]): ColumnarIterator = {
val ctx = newCodeGenContext()
val numFields = columnTypes.size
val (initializeAccessors, extractors) = columnTypes.zipWithIndex.map { case (dt, index) =>
val accessorCls = dt match {
case NullType => classOf[NullColumnAccessor].getName
case BooleanType => classOf[BooleanColumnAccessor].getName
case ByteType => classOf[ByteColumnAccessor].getName
case ShortType => classOf[ShortColumnAccessor].getName
case IntegerType | DateType | _: YearMonthIntervalType => classOf[IntColumnAccessor].getName
case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
classOf[LongColumnAccessor].getName
case FloatType => classOf[FloatColumnAccessor].getName
case DoubleType => classOf[DoubleColumnAccessor].getName
case StringType => classOf[StringColumnAccessor].getName
case BinaryType => classOf[BinaryColumnAccessor].getName
case CalendarIntervalType => classOf[IntervalColumnAccessor].getName
case dt: DecimalType if dt.precision <= Decimal.MAX_LONG_DIGITS =>
classOf[CompactDecimalColumnAccessor].getName
case dt: DecimalType => classOf[DecimalColumnAccessor].getName
case struct: StructType => classOf[StructColumnAccessor].getName
case array: ArrayType => classOf[ArrayColumnAccessor].getName
case t: MapType => classOf[MapColumnAccessor].getName
}
val accessorName = ctx.addMutableState(accessorCls, "accessor")
val createCode = dt match {
case t if CodeGenerator.isPrimitiveType(dt) =>
s"$accessorName = new $accessorCls(ByteBuffer.wrap(buffers[$index]).order(nativeOrder));"
case NullType | StringType | BinaryType | CalendarIntervalType =>
s"$accessorName = new $accessorCls(ByteBuffer.wrap(buffers[$index]).order(nativeOrder));"
case other =>
s"""$accessorName = new $accessorCls(ByteBuffer.wrap(buffers[$index]).order(nativeOrder),
(${dt.getClass.getName}) columnTypes[$index]);"""
}
val extract = s"$accessorName.extractTo(mutableRow, $index);"
val patch = dt match {
case DecimalType.Fixed(p, s) if p > Decimal.MAX_LONG_DIGITS =>
// For large Decimal, it should have 16 bytes for future update even it's null now.
s"""
if (mutableRow.isNullAt($index)) {
rowWriter.write($index, (Decimal) null, $p, $s);
}
"""
case other => ""
}
(createCode, extract + patch)
}.unzip
/*
     * 200 = 6000 bytes / 30 (up to 30 bytes per call)
     * the maximum byte code size to be compiled by HotSpot is 8000 bytes,
     * so we should keep it below 8000
*/
val numberOfStatementsThreshold = 200
val (initializerAccessorCalls, extractorCalls) =
if (initializeAccessors.length <= numberOfStatementsThreshold) {
        (initializeAccessors.mkString("\n"), extractors.mkString("\n"))
} else {
val groupedAccessorsItr = initializeAccessors.grouped(numberOfStatementsThreshold)
val groupedExtractorsItr = extractors.grouped(numberOfStatementsThreshold)
val accessorNames = groupedAccessorsItr.zipWithIndex.map { case (body, i) =>
val funcName = s"accessors$i"
val funcCode = s"""
|private void $funcName() {
             |  ${body.mkString("\n")}
|}
""".stripMargin
ctx.addNewFunction(funcName, funcCode)
}
val extractorNames = groupedExtractorsItr.zipWithIndex.map { case (body, i) =>
val funcName = s"extractors$i"
val funcCode = s"""
|private void $funcName() {
             |  ${body.mkString("\n")}
|}
""".stripMargin
ctx.addNewFunction(funcName, funcCode)
}
      (accessorNames.map { accessorName => s"$accessorName();" }.mkString("\n"),
        extractorNames.map { extractorName => s"$extractorName();"}.mkString("\n"))
}
val codeBody = s"""
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import scala.collection.Iterator;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder;
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter;
import org.apache.spark.sql.execution.columnar.MutableUnsafeRow;
public SpecificColumnarIterator generate(Object[] references) {
return new SpecificColumnarIterator();
}
class SpecificColumnarIterator extends ${classOf[ColumnarIterator].getName} {
private ByteOrder nativeOrder = null;
private byte[][] buffers = null;
private UnsafeRowWriter rowWriter = new UnsafeRowWriter($numFields);
private MutableUnsafeRow mutableRow = null;
private int currentRow = 0;
private int numRowsInBatch = 0;
private scala.collection.Iterator input = null;
private DataType[] columnTypes = null;
private int[] columnIndexes = null;
${ctx.declareMutableStates()}
public SpecificColumnarIterator() {
this.nativeOrder = ByteOrder.nativeOrder();
this.buffers = new byte[${columnTypes.length}][];
this.mutableRow = new MutableUnsafeRow(rowWriter);
}
public void initialize(Iterator input, DataType[] columnTypes, int[] columnIndexes) {
this.input = input;
this.columnTypes = columnTypes;
this.columnIndexes = columnIndexes;
}
public boolean hasNext() {
if (currentRow < numRowsInBatch) {
return true;
}
if (!input.hasNext()) {
return false;
}
${classOf[DefaultCachedBatch].getName} batch =
(${classOf[DefaultCachedBatch].getName}) input.next();
currentRow = 0;
numRowsInBatch = batch.numRows();
for (int i = 0; i < columnIndexes.length; i ++) {
buffers[i] = batch.buffers()[columnIndexes[i]];
}
${initializerAccessorCalls}
return hasNext();
}
public InternalRow next() {
currentRow += 1;
rowWriter.reset();
rowWriter.zeroOutNullBytes();
${extractorCalls}
return rowWriter.getRow();
}
${ctx.declareAddedFunctions()}
}"""
val code = CodeFormatter.stripOverlappingComments(
new CodeAndComment(codeBody, ctx.getPlaceHolderToComments()))
    logDebug(s"Generated ColumnarIterator:\n${CodeFormatter.format(code)}")
val (clazz, _) = CodeGenerator.compile(code)
clazz.generate(Array.empty).asInstanceOf[ColumnarIterator]
}
}
| ueshin/apache-spark | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala | Scala | apache-2.0 | 10,108 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600a.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalBoolean, Input}
case class LPQ06(value: Option[Boolean]) extends CtBoxIdentifier(name = "Do you intend to file your return before 30 June 2014?") with CtOptionalBoolean with Input
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600a/v2/LPQ06.scala | Scala | apache-2.0 | 874 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze._
import edu.latrobe.blaze.modules._
abstract class MeanPooling_JVM
extends MeanPooling {
final override lazy val outputPlatform
: JVM.type = JVM
// ---------------------------------------------------------------------------
// Forward propagation related.
// ---------------------------------------------------------------------------
final override protected def doPredict(mode: Mode,
input: Tensor)
: (RealArrayTensor, PredictContext) = {
val inp = input.asOrToRealArrayTensor
val inpLayout = inp.layout
val inpSize = inpLayout.size
val outSize = kernel.outputSizeFor(inpSize, inpSize.noChannels)
val outLayout = inpLayout.derive(outSize)
val out = RealArrayTensor.zeros(outLayout)
val ctx = mode match {
case mode: Training =>
val countsInv = doPredictForTraining(inp, out)
MeanPooling_JVM_Context(inpLayout, countsInv)
case mode: Inference =>
doPredictForInference(inp, out)
EmptyContext
case _ =>
throw new MatchError(mode)
}
// Cleanup.
if (inp ne input) {
inp.close()
}
(out, ctx)
}
protected def doPredictForTraining(input: RealArrayTensor,
output: RealArrayTensor)
: Array[Real]
protected def doPredictForInference(input: RealArrayTensor,
output: RealArrayTensor)
: Unit
// ---------------------------------------------------------------------------
// Back propagation related.
// ---------------------------------------------------------------------------
final override val backpropagationRequirementsForInput
: TensorDependency = TensorDependency.NotRequired
final override val backpropagationRequirementsForOutput
: TensorDependency = TensorDependency.NotRequired
/**
   * Since we average during the forward pass, the errors we receive relate to averages of many
   * activations. Hence, we simply reverse the averaging process here.
*/
final override protected def doDeriveInputError(input: Tensor,
output: Tensor,
context: PredictContext,
error: Tensor)
: RealArrayTensor = context match {
case MeanPooling_JVM_Context(inputLayout, countsInv) =>
val oldErr = error.asOrToRealArrayTensor
val newErr = RealArrayTensor.zeros(inputLayout)
doDeriveInputError(countsInv, oldErr, newErr)
if (oldErr ne error) {
oldErr.close()
}
newErr
case _ =>
throw new MatchError(context)
}
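  // A rough illustration of the scaling above: with a 2x2 pooling window each output is the
  // mean of 4 inputs, so during backprop each contributing input receives outputError * (1/4).
  // The countsInv array produced by doPredictForTraining is assumed to hold exactly these
  // reciprocal window sizes (which can differ at the borders).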
protected def doDeriveInputError(countsInv: Array[Real],
oldError: RealArrayTensor,
newError: RealArrayTensor)
: Unit
}
final case class MeanPooling_JVM_Context(inputLayout: IndependentTensorLayout,
countsInv: Array[Real])
extends PredictContext {
require(inputLayout != null && countsInv != null)
} | bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/MeanPooling_JVM.scala | Scala | apache-2.0 | 3,902 |
package com.microsoft.awt.data
import org.scalajs.nodejs.mongodb.{Db, _}
import scala.concurrent.ExecutionContext
import scala.scalajs.js
/**
* Session DAO
* @author [email protected]
*/
@js.native
trait SessionDAO extends Collection
/**
* Session DAO Companion
* @author [email protected]
*/
object SessionDAO {
/**
* Session DAO Enrichment
* @param sessionDAO the given [[SessionDAO Session DAO]]
*/
implicit class SessionDAOEnrichment(val sessionDAO: SessionDAO) extends AnyVal {
def findAndUpdateByID(sessionID: String, isAnonymous: Boolean = false)(implicit mongodb: MongoDB) = {
sessionDAO.findOneAndUpdate(
filter = "_id" $eq sessionID.$oid,
update = $set(doc("isAnonymous" -> isAnonymous, "lastUpdated" -> js.Date.now())))
}
}
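  // A minimal usage sketch, assuming implicit MongoDB and ExecutionContext instances in scope:
  //   db.getSessionDAO.map(_.findAndUpdateByID(sessionID, isAnonymous = false))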
/**
* Session DAO Extensions
* @param db the given [[Db DB]]
*/
implicit class SessionDAOExtensions(val db: Db) extends AnyVal {
@inline
def getSessionDAO(implicit ec: ExecutionContext) = {
db.collectionFuture("sessions").mapTo[SessionDAO]
}
}
} | ldaniels528/awt | app-nodejs/src/main/scala/com/microsoft/awt/data/SessionDAO.scala | Scala | apache-2.0 | 1,106 |
package sttp.client3.testing.streaming
import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AsyncFreeSpec
import org.scalatest.matchers.should.Matchers
import sttp.capabilities.Streams
import sttp.client3._
import sttp.client3.internal.Utf8
import sttp.client3.testing.HttpTest.endpoint
import sttp.client3.testing.streaming.StreamingTest._
import sttp.client3.testing.{ConvertToFuture, ToFutureWrapper}
import sttp.model.sse.ServerSentEvent
abstract class StreamingTest[F[_], S]
extends AsyncFreeSpec
with Matchers
with ToFutureWrapper
with BeforeAndAfterAll
with StreamingTestExtensions[F, S] {
val streams: Streams[S]
def backend: SttpBackend[F, S]
implicit def convertToFuture: ConvertToFuture[F]
def bodyProducer(chunks: Iterable[Array[Byte]]): streams.BinaryStream
private def stringBodyProducer(body: String): streams.BinaryStream =
bodyProducer(body.getBytes(Utf8).grouped(10).toIterable)
def bodyConsumer(stream: streams.BinaryStream): F[String]
def sseConsumer(stream: streams.BinaryStream): F[List[ServerSentEvent]]
protected def supportsStreamingMultipartParts = true
"stream request body" in {
basicRequest
.post(uri"$endpoint/streaming/echo")
.streamBody(streams)(stringBodyProducer(Body))
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe Right(Body)
}
}
"stream large request body" in {
basicRequest
.post(uri"$endpoint/streaming/echo")
.streamBody(streams)(stringBodyProducer(Body))
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe Right(Body)
}
}
"handle server sent events SSE" in {
    val sseData = "ala ma kota\nzbyszek ma psa"
val expectedEvent = ServerSentEvent(data = Some(sseData), eventType = Some("test-event"), retry = Some(42000))
val expectedEvents =
Seq(expectedEvent.copy(id = Some("1")), expectedEvent.copy(id = Some("2")), expectedEvent.copy(id = Some("3")))
basicRequest
.post(uri"$endpoint/sse/echo3")
.body(sseData)
.response(asStreamAlways(streams)(sseConsumer(_)))
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe expectedEvents
}
}
"receive a stream" in {
basicRequest
.post(uri"$endpoint/streaming/echo")
.body(Body)
.response(asStreamAlways(streams)(bodyConsumer(_)))
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe Body
}
}
"receive a stream and ignore it (without consuming)" in {
basicRequest
.post(uri"$endpoint/streaming/echo")
.body(Body)
      // if the backend has any mechanisms to consume an incorrectly handled (ignored) stream, they should kick in here
.response(asStreamAlways(streams)(_ => bodyConsumer(stringBodyProducer("ignore"))))
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe "ignore"
}
}
"receive a stream (unsafe)" in {
// TODO: for some reason these explicit types are needed in Dotty
val r0: RequestT[Identity, streams.BinaryStream, S] = basicRequest
.post(uri"$endpoint/streaming/echo")
.body(Body)
.response(asStreamAlwaysUnsafe(streams))
r0.send(backend)
.toFuture()
.flatMap { response =>
bodyConsumer(response.body).toFuture()
}
.map { responseBody =>
responseBody shouldBe Body
}
}
"receive a large stream (unsafe)" in {
// TODO: for some reason these explicit types are needed in Dotty
val r0: RequestT[Identity, streams.BinaryStream, S] = basicRequest
.post(uri"$endpoint/streaming/echo")
.body(LargeBody)
.response(asStreamAlwaysUnsafe(streams))
r0.send(backend)
.toFuture()
.flatMap { response =>
bodyConsumer(response.body).toFuture()
}
.map { responseBody =>
if (responseBody.length != LargeBody.length) {
fail(s"Response body had length ${responseBody.length}, instead of ${LargeBody.length}, starts with: ${responseBody
.take(512)}")
} else {
succeed
}
}
}
"receive a stream or error (unsafe)" in {
// TODO: for some reason these explicit types are needed in Dotty
val r0: RequestT[Identity, Either[String, streams.BinaryStream], S] = basicRequest
.post(uri"$endpoint/streaming/echo")
.body(Body)
.response(asStreamUnsafe(streams))
r0.send(backend)
.toFuture()
.flatMap { response =>
bodyConsumer(response.body.right.get).toFuture()
}
.map { responseBody =>
responseBody shouldBe Body
}
}
"receive a mapped stream (unsafe)" in {
// TODO: for some reason these explicit types are needed in Dotty
val r0: RequestT[Identity, (streams.BinaryStream, Boolean), S] = basicRequest
.post(uri"$endpoint/streaming/echo")
.body(Body)
.response(asStreamAlwaysUnsafe(streams).map(s => (s, true)))
r0
.send(backend)
.toFuture()
.flatMap { response =>
val (stream, flag) = response.body
bodyConsumer(stream).toFuture().map((_, flag))
}
.map { responseBody =>
responseBody shouldBe ((Body, true))
}
}
"receive a stream from an https site (unsafe)" in {
val numChunks = 100
val url = uri"https://httpbin.org/stream/$numChunks"
// TODO: for some reason these explicit types are needed in Dotty
val r0: RequestT[Identity, streams.BinaryStream, S] = basicRequest
// of course, you should never rely on the internet being available
// in tests, but that's so much easier than setting up an https
// testing server
.get(url)
.response(asStreamAlwaysUnsafe(streams))
r0
.send(backend)
.toFuture()
.flatMap { response =>
bodyConsumer(response.body).toFuture()
}
.map { responseBody =>
val urlRegex = s""""${url.toString}"""".r
urlRegex.findAllIn(responseBody).length shouldBe numChunks
}
}
if (supportsStreamingMultipartParts) {
"send a stream part in a multipart request" in {
basicRequest
.post(uri"$endpoint/multipart")
.response(asStringAlways)
.multipartBody(
multipart("p1", "v1"),
multipartStream(streams)("p2", stringBodyProducer("v2")),
multipart("p3", "v3")
)
.send(backend)
.toFuture()
.map { response =>
response.body shouldBe s"p1=v1, p2=v2, p3=v3"
}
}
}
override protected def afterAll(): Unit = {
backend.close().toFuture()
super.afterAll()
}
}
object StreamingTest {
val Body = "streaming test"
val LargeBody: String = "x" * 4000000
}
| softwaremill/sttp | core/src/test/scala/sttp/client3/testing/streaming/StreamingTest.scala | Scala | apache-2.0 | 6,811 |
package com.wallace.demo.app.common
import com.wallace.demo.app.utils.FuncRuntimeDur
import scala.util.control.NonFatal
trait Using extends FuncRuntimeDur {
protected def usingWithErrMsg[A <: {def close() : Unit}, B](param: A, errMsg: String)(f: A => B): Unit = {
try {
f(param)
} catch {
case NonFatal(e) =>
log.error(s"$errMsg: ", e)
} finally {
param.close()
}
}
protected def using[A <: {def close() : Unit}, B](param: A)(f: A => B): B = {
try {
f(param)
} finally {
param.close()
}
}
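  // A minimal usage sketch (from a class that mixes in Using): the loan pattern guarantees
  // close() is called even if `f` throws.
  //   val firstLine = using(scala.io.Source.fromFile("data.txt"))(src => src.getLines().next())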
}
trait Formatter[T] {
def toString(obj: T): String
def fromString(sObj: String): Option[T]
} | BiyuHuang/CodePrototypesDemo | demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/Using.scala | Scala | apache-2.0 | 666 |
package org.vipervm.fp
class REPL {
val console = new jline.ConsoleReader
val prompt = "vvm> "
repl
def repl:Unit = {
val s = console.readLine(prompt)
val idx = s.indexOf(" ",0)
val (cmd,args) = if (idx > 0) s.splitAt(idx) else (s,"")
cmd match {
case ":quit" =>
case ":parse" => {
println(Parser.parse(args))
repl
}
case ":eval" => {
val e = Parser.parse(args)
e match {
case Parser.Success(t,_) => {
println(Printer.print(Term.eval(new Context, t), new Context))
}
case Parser.NoSuccess(msg,_) => println(msg)
}
repl
}
case _ => {
val e = Parser.parse(s)
e match {
case Parser.Success(t,_) => println(Printer.print(Term.eval(new Context,t), new Context))
case Parser.NoSuccess(msg,_) => println(msg)
}
repl
}
}
}
}
object REPL {
def main(args:Array[String]):Unit = {
val r = new REPL
}
}
| hsyl20/Scala_ViperVM | src/main/scala/org/vipervm/_old/fp/REPL.scala | Scala | gpl-3.0 | 1,004 |
import shapeless._
import shapeless.ops.nat._
trait NumFields[A] {
type Out <: Nat
def value(implicit toInt: ToInt[Out]): Int =
toInt.apply()
}
object NumFields extends NumFieldsInstances
trait NumFieldsFunctions {
type Aux[A, N <: Nat] = NumFields[A] { type Out = N }
def apply[A](implicit numFields: NumFields[A]): Aux[A, numFields.Out] =
numFields
}
trait NumFieldsInstances extends LowPriorityNumFieldsInstances {
implicit val hnilInstance: Aux[HNil, Nat._0] =
new NumFields[HNil] { type Out = Nat._0 }
implicit def hlistInstance[Head, Tail <: HList, HeadSize <: Nat, TailSize <: Nat, TotalSize <: Nat](
implicit
hSize: Lazy[NumFields.Aux[Head, HeadSize]],
tSize: NumFields.Aux[Tail, TailSize],
sum: Sum.Aux[HeadSize, TailSize, TotalSize]
): Aux[Head :: Tail, TotalSize] =
new NumFields[Head :: Tail] { type Out = TotalSize }
implicit val cnilInstance: Aux[CNil, Nat._0] =
new NumFields[CNil] { type Out = Nat._0 }
implicit def coproductInstance[Head, Tail <: Coproduct, HeadSize <: Nat, TailSize <: Nat, MaxSize <: Nat](
implicit
hSize: Lazy[NumFields.Aux[Head, HeadSize]],
tSize: NumFields.Aux[Tail, TailSize],
max: Max.Aux[HeadSize, TailSize, MaxSize]
): Aux[Head :+: Tail, MaxSize] =
new NumFields[Head :+: Tail] { type Out = MaxSize }
implicit def genericInstance[A, Repr, Size <: Nat](
implicit
gen: Generic.Aux[A, Repr],
size: Lazy[NumFields.Aux[Repr, Size]]
): Aux[A, Size] =
new NumFields[A] { type Out = Size }
}
trait LowPriorityNumFieldsInstances extends NumFieldsFunctions {
implicit def anyInstance[A](implicit ev: LowPriority): Aux[A, Nat._1] =
new NumFields[A] { type Out = Nat._1 }
}
sealed trait Shape
final case class Rectangle(width: Double, height: Double) extends Shape
final case class Circle(radius: Double) extends Shape
object Main {
def main(args: Array[String]): Unit = {
println("Number of fields in Shape: " + NumFields[Shape].value)
println("Number of fields in Rectangle: " + NumFields[Rectangle].value)
println("Number of fields in Circle: " + NumFields[Circle].value)
}
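  // Expected output, given the instances above: Rectangle flattens to two Double fields (2),
  // Circle to one (1), and Shape, a coproduct, reports the maximum of its cases (2).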
} | mikkka/shapeless-guide-code | numfields/src/main/scala/numfields.scala | Scala | apache-2.0 | 2,146 |
package cz.jenda.pidifrky.logic.http
import java.util.concurrent._
import cz.jenda.pidifrky.logic.DebugReporter
import scala.util.control.NonFatal
/**
* @author Jenda Kolena, [email protected]
*/
object RequestTimeouter {
private val executor = Executors.newSingleThreadScheduledExecutor()
private val requestsMap = new ConcurrentHashMap[String, ScheduledFuture[_]]()
def requestSent(request: RequestId)(timeoutAction: => Unit)(implicit requestSettings: RequestSettings = RequestSettings()): Unit = {
val timeoutMillis = requestSettings.requestTimeout.getOrElse(2000)
DebugReporter.debug(s"Planning timeout action for $request")
val f = executor.schedule(new Runnable {
override def run(): Unit = try {
remove(request.value)
DebugReporter.debug(s"Timing out $request")
timeoutAction
} catch {
case NonFatal(e) => DebugReporter.debug("Timeout action has thrown an exception; this is probably a BUG", e)
}
}, timeoutMillis, TimeUnit.MILLISECONDS)
if (requestsMap.putIfAbsent(request.value, f) != null) {
DebugReporter.debug(s"Detected probably repeated $request, timeout may not work as expected")
}
}
def requestFinished(request: RequestId): Unit = {
remove(request.value)
DebugReporter.debug(s"Request $request finished, won't be timed out")
}
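  // A minimal usage sketch, assuming `id` is the RequestId of an in-flight HTTP call:
  //   RequestTimeouter.requestSent(id) { /* e.g. fail the pending request */ }
  //   // ... and once a response arrives:
  //   RequestTimeouter.requestFinished(id)   // cancels the scheduled timeout action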
private def remove(request: String): Unit = {
Option(requestsMap.remove(request)).foreach(_.cancel(false))
}
}
| jendakol/pidifrky | client/src/main/scala/cz/jenda/pidifrky/logic/http/RequestTimeouter.scala | Scala | apache-2.0 | 1,479 |