code        stringlengths   5–1M
repo_name   stringlengths   5–109
path        stringlengths   6–208
language    stringclasses   1 value
license     stringclasses   15 values
size        int64           5–1M
package com.eevolution.context.dictionary.infrastructure.service

import java.util.UUID

import akka.NotUsed
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.PinStance
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}

/**
  * Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
  * This program is free software: you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
  * the Free Software Foundation, either version 3 of the License, or
  * (at your option) any later version.
  * This program is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  * GNU General Public License for more details.
  * You should have received a copy of the GNU General Public License
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  * Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
  * Created by [email protected] , www.e-evolution.com on 15/11/17.
  */

/**
  * Pin Stance Service
  */
trait PinStanceService extends Service with api.service.PinStanceService {

  override def getAll(): ServiceCall[NotUsed, List[PinStance]]

  override def getById(id: Int): ServiceCall[NotUsed, PinStance]

  override def getByUUID(uuid: UUID): ServiceCall[NotUsed, PinStance]

  override def getAllByPage(pageNo: Option[Int], pageSize: Option[Int]): ServiceCall[NotUsed, PaginatedSequence[PinStance]]

  def descriptor = {
    import Service._

    named("pinStance").withCalls(
      pathCall("/api/v1_0_0/pinStance/all", getAll _),
      pathCall("/api/v1_0_0/pinStance/:id", getById _),
      pathCall("/api/v1_0_0/pinStance/:uuid", getByUUID _),
      pathCall("/api/v1_0_0/pinStance?pageNo&pageSize", getAllByPage _)
    )
  }
}
adempiere/ADReactiveSystem
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/PinStanceService.scala
Scala
gpl-3.0
2,035
import stainless.annotation.{ghost => ghostAnnot}
import stainless.lang._
import stainless.collection._

object GhostFlow1 {
  case class Ghost(@ghostAnnot var p: BigInt) {
    def f(x: BigInt) = {
      ghost {
        p = p + 1
      }
    }
  }
}
epfl-lara/stainless
frontends/benchmarks/extraction/valid/GhostFlow1.scala
Scala
apache-2.0
250
import jgo.tools.compiler._
import parser.BlockLang
import parser.combinatorExten._
import lexer.Scanner

import interm.codeseq._

class ParserTests(val tests: String*) extends ParserTestSuite

trait ParserTestSuite extends App {
  testAll()

  val tests: Seq[String]

  def testAll() {
    tests foreach test
  }

  def test(in: String) {
    try {
      println("testing: " + in)
      val bl = new BlockLang(Scanner(in)) with ExceptionTracing // with TracePrintingParsers
      bl.result match {
        case ns: bl.NoSuccess => println("\nsyntax error:\n" + ns)
        case bl.Success(outM, _) =>
          if (outM.isDefined)
            println("\n" + outM.get.listing)
          else {
            println("\ncompilation errors:")
            outM.errors foreach { err => println(err.longString) }
          }
      }
    } catch {
      case e =>
        println("!!!!!\t!!!!!\t!!!!!")
        println("EXCEPTION: " + e)
        e.printStackTrace()
        println("!!!!!\t!!!!!\t!!!!!")
    }
    println()
  }
}
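For orientation, a minimal sketch of how the suite above could be instantiated. The object name and the Go snippets are arbitrary examples and are only assumed, not guaranteed, to be accepted by jgo's BlockLang parser.

// Hypothetical driver: ParserTests extends App, so this object gains a main
// method that runs testAll() over the given Go source snippets.
object BlockLangDemo extends ParserTests(
  "x := 5",
  "for i := 0; i < 10; i++ { x = x + i }"
)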
thomasmodeneis/jgo
src/src/test/scala/ParserTest.scala
Scala
gpl-3.0
1,054
package net.aicomp.sample.sbt

object Main {
  def main(args: Array[String]) {
    var line = ""
    System.out.println("SampleSbt")
    while ({ line = readLine(); line ne null }) {
      Iterator.continually(readLine()).takeWhile(_ != "EOS").toList
      System.out.println("finish")
    }
  }
}
AI-comp/Terraforming
SampleAI/sbt/src/main/scala/net/aicomp/sample/sbt/Main.scala
Scala
apache-2.0
298
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive import java.io._ import java.util.UUID import scala.Array.canBuildFrom import scala.collection.mutable.ArrayBuffer import scala.language.implicitConversions import scala.util.parsing.combinator.RegexParsers import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.execution.command.Partitioner import org.apache.spark.sql.hive.client.ClientInterface import org.apache.spark.sql.types._ import org.apache.carbondata.common.logging.LogServiceFactory import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.datastore.impl.FileFactory import org.apache.carbondata.core.datastore.impl.FileFactory.FileType import org.apache.carbondata.core.locks.ZookeeperInit import org.apache.carbondata.core.metadata.{CarbonMetadata, CarbonTableIdentifier} import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl import org.apache.carbondata.core.metadata.schema.table.CarbonTable import org.apache.carbondata.core.reader.ThriftReader import org.apache.carbondata.core.stats.{QueryStatistic, QueryStatisticsConstants} import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory, CarbonUtil} import org.apache.carbondata.core.util.path.{CarbonStorePath, CarbonTablePath} import org.apache.carbondata.core.writer.ThriftWriter import org.apache.carbondata.format.{SchemaEvolutionEntry, TableInfo} import org.apache.carbondata.processing.merger.TableMeta case class MetaData(var tablesMeta: ArrayBuffer[TableMeta]) case class CarbonMetaData(dims: Seq[String], msrs: Seq[String], carbonTable: CarbonTable, dictionaryMap: DictionaryMap) object CarbonMetastore { def readSchemaFileToThriftTable(schemaFilePath: String): TableInfo = { val createTBase = new ThriftReader.TBaseCreator() { override def create(): org.apache.thrift.TBase[TableInfo, TableInfo._Fields] = { new TableInfo() } } val thriftReader = new ThriftReader(schemaFilePath, createTBase) var tableInfo: TableInfo = null try { thriftReader.open() tableInfo = thriftReader.read().asInstanceOf[TableInfo] } finally { thriftReader.close() } tableInfo } def writeThriftTableToSchemaFile(schemaFilePath: String, tableInfo: TableInfo): Unit = { val thriftWriter = new ThriftWriter(schemaFilePath, false) try { thriftWriter.open() thriftWriter.write(tableInfo); } finally { thriftWriter.close() } } } case class DictionaryMap(dictionaryMap: Map[String, Boolean]) { def get(name: String): Option[Boolean] = { dictionaryMap.get(name.toLowerCase) } } class CarbonMetastore(hiveContext: HiveContext, val storePath: String, 
client: ClientInterface, queryId: String) extends HiveMetastoreCatalog(client, hiveContext) { @transient val LOGGER = LogServiceFactory.getLogService("org.apache.spark.sql.CarbonMetastoreCatalog") val tableModifiedTimeStore = new java.util.HashMap[String, Long]() tableModifiedTimeStore .put(CarbonCommonConstants.DATABASE_DEFAULT_NAME, System.currentTimeMillis()) val metadata = loadMetadata(storePath) def getTableCreationTime(databaseName: String, tableName: String): Long = { val tableMeta = metadata.tablesMeta.filter( c => c.carbonTableIdentifier.getDatabaseName.equalsIgnoreCase(databaseName) && c.carbonTableIdentifier.getTableName.equalsIgnoreCase(tableName)) val tableCreationTime = tableMeta.head.carbonTable.getTableLastUpdatedTime tableCreationTime } def lookupRelation1(dbName: Option[String], tableName: String)(sqlContext: SQLContext): LogicalPlan = { lookupRelation1(TableIdentifier(tableName, dbName))(sqlContext) } def lookupRelation1(tableIdentifier: TableIdentifier, alias: Option[String] = None)(sqlContext: SQLContext): LogicalPlan = { checkSchemasModifiedTimeAndReloadTables() val database = tableIdentifier.database.getOrElse(getDB.getDatabaseName(None, sqlContext)) val tables = getTableFromMetadata(database, tableIdentifier.table) tables match { case Some(t) => CarbonRelation(database, tableIdentifier.table, CarbonSparkUtil.createSparkMeta(tables.head.carbonTable), tables.head, alias)(sqlContext) case None => LOGGER.audit(s"Table Not Found: ${tableIdentifier.table}") throw new NoSuchTableException } } /** * This method will search for a table in the catalog metadata * * @param database * @param tableName * @return */ def getTableFromMetadata(database: String, tableName: String): Option[TableMeta] = { metadata.tablesMeta .find(c => c.carbonTableIdentifier.getDatabaseName.equalsIgnoreCase(database) && c.carbonTableIdentifier.getTableName.equalsIgnoreCase(tableName)) } def tableExists(identifier: TableIdentifier)(sqlContext: SQLContext): Boolean = { checkSchemasModifiedTimeAndReloadTables() val database = identifier.database.getOrElse(getDB.getDatabaseName(None, sqlContext)) val tables = metadata.tablesMeta.filter( c => c.carbonTableIdentifier.getDatabaseName.equalsIgnoreCase(database) && c.carbonTableIdentifier.getTableName.equalsIgnoreCase(identifier.table)) tables.nonEmpty } def loadMetadata(metadataPath: String): MetaData = { val recorder = CarbonTimeStatisticsFactory.createDriverRecorder() val statistic = new QueryStatistic() // creating zookeeper instance once. // if zookeeper is configured as carbon lock type. 
val zookeeperurl = hiveContext.getConf(CarbonCommonConstants.ZOOKEEPER_URL, null) if (null != zookeeperurl) { CarbonProperties.getInstance .addProperty(CarbonCommonConstants.ZOOKEEPER_URL, zookeeperurl) } if (metadataPath == null) { return null } // if no locktype is configured and store type is HDFS set HDFS lock as default if (null == CarbonProperties.getInstance .getProperty(CarbonCommonConstants.LOCK_TYPE) && FileType.HDFS == FileFactory.getFileType(metadataPath)) { CarbonProperties.getInstance .addProperty(CarbonCommonConstants.LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS ) LOGGER.info("Default lock type HDFSLOCK is configured") } val fileType = FileFactory.getFileType(metadataPath) val metaDataBuffer = new ArrayBuffer[TableMeta] fillMetaData(metadataPath, fileType, metaDataBuffer) updateSchemasUpdatedTime(readSchemaFileSystemTime("", "")) statistic.addStatistics(QueryStatisticsConstants.LOAD_META, System.currentTimeMillis()) recorder.recordStatisticsForDriver(statistic, queryId) MetaData(metaDataBuffer) } private def fillMetaData(basePath: String, fileType: FileType, metaDataBuffer: ArrayBuffer[TableMeta]): Unit = { val databasePath = basePath // + "/schemas" try { if (FileFactory.isFileExist(databasePath, fileType)) { val file = FileFactory.getCarbonFile(databasePath, fileType) val databaseFolders = file.listFiles() databaseFolders.foreach(databaseFolder => { if (databaseFolder.isDirectory) { val dbName = databaseFolder.getName val tableFolders = databaseFolder.listFiles() tableFolders.foreach(tableFolder => { if (tableFolder.isDirectory) { val carbonTableIdentifier = new CarbonTableIdentifier(databaseFolder.getName, tableFolder.getName, UUID.randomUUID().toString) val carbonTablePath = CarbonStorePath.getCarbonTablePath(basePath, carbonTableIdentifier) val tableMetadataFile = carbonTablePath.getSchemaFilePath if (FileFactory.isFileExist(tableMetadataFile, fileType)) { val tableName = tableFolder.getName val tableUniqueName = databaseFolder.getName + "_" + tableFolder.getName val createTBase = new ThriftReader.TBaseCreator() { override def create(): org.apache.thrift.TBase[TableInfo, TableInfo._Fields] = { new TableInfo() } } val thriftReader = new ThriftReader(tableMetadataFile, createTBase) thriftReader.open() val tableInfo: TableInfo = thriftReader.read().asInstanceOf[TableInfo] thriftReader.close() val schemaConverter = new ThriftWrapperSchemaConverterImpl val wrapperTableInfo = schemaConverter .fromExternalToWrapperTableInfo(tableInfo, dbName, tableName, basePath) val schemaFilePath = CarbonStorePath .getCarbonTablePath(storePath, carbonTableIdentifier).getSchemaFilePath wrapperTableInfo.setStorePath(storePath) wrapperTableInfo .setMetaDataFilepath(CarbonTablePath.getFolderContainingFile(schemaFilePath)) CarbonMetadata.getInstance().loadTableMetadata(wrapperTableInfo) val carbonTable = CarbonMetadata.getInstance().getCarbonTable(tableUniqueName) metaDataBuffer += new TableMeta(carbonTable.getCarbonTableIdentifier, storePath, null, carbonTable) } } }) } }) } else { // Create folders and files. FileFactory.mkdirs(databasePath, fileType) } } catch { case s: java.io.FileNotFoundException => // Create folders and files. FileFactory.mkdirs(databasePath, fileType) } } /** * * Prepare Thrift Schema from wrapper TableInfo and write to Schema file. 
* Load CarbonTable from wrapper tableinfo * */ def createTableFromThrift( tableInfo: org.apache.carbondata.core.metadata.schema.table.TableInfo, dbName: String, tableName: String, partitioner: Partitioner) (sqlContext: SQLContext): String = { if (tableExists(TableIdentifier(tableName, Some(dbName)))(sqlContext)) { sys.error(s"Table [$tableName] already exists under Database [$dbName]") } val schemaConverter = new ThriftWrapperSchemaConverterImpl val thriftTableInfo = schemaConverter .fromWrapperToExternalTableInfo(tableInfo, dbName, tableName) val schemaEvolutionEntry = new SchemaEvolutionEntry(tableInfo.getLastUpdatedTime) thriftTableInfo.getFact_table.getSchema_evolution.getSchema_evolution_history .add(schemaEvolutionEntry) val carbonTableIdentifier = new CarbonTableIdentifier(dbName, tableName, tableInfo.getFactTable.getTableId) val carbonTablePath = CarbonStorePath.getCarbonTablePath(storePath, carbonTableIdentifier) val schemaFilePath = carbonTablePath.getSchemaFilePath val schemaMetadataPath = CarbonTablePath.getFolderContainingFile(schemaFilePath) tableInfo.setMetaDataFilepath(schemaMetadataPath) tableInfo.setStorePath(storePath) CarbonMetadata.getInstance().loadTableMetadata(tableInfo) val tableMeta = new TableMeta(carbonTableIdentifier, storePath, null, CarbonMetadata.getInstance().getCarbonTable(dbName + "_" + tableName)) val fileType = FileFactory.getFileType(schemaMetadataPath) if (!FileFactory.isFileExist(schemaMetadataPath, fileType)) { FileFactory.mkdirs(schemaMetadataPath, fileType) } val thriftWriter = new ThriftWriter(schemaFilePath, false) thriftWriter.open() thriftWriter.write(thriftTableInfo) thriftWriter.close() metadata.tablesMeta += tableMeta logInfo(s"Table $tableName for Database $dbName created successfully.") LOGGER.info(s"Table $tableName for Database $dbName created successfully.") updateSchemasUpdatedTime(touchSchemaFileSystemTime(dbName, tableName)) carbonTablePath.getPath } private def updateMetadataByWrapperTable( wrapperTableInfo: org.apache.carbondata.core.metadata.schema.table.TableInfo): Unit = { CarbonMetadata.getInstance().loadTableMetadata(wrapperTableInfo) val carbonTable = CarbonMetadata.getInstance().getCarbonTable( wrapperTableInfo.getTableUniqueName) for (i <- metadata.tablesMeta.indices) { if (wrapperTableInfo.getTableUniqueName.equals( metadata.tablesMeta(i).carbonTableIdentifier.getTableUniqueName)) { metadata.tablesMeta(i).carbonTable = carbonTable } } } def updateMetadataByThriftTable(schemaFilePath: String, tableInfo: TableInfo, dbName: String, tableName: String, storePath: String): Unit = { tableInfo.getFact_table.getSchema_evolution.getSchema_evolution_history.get(0) .setTime_stamp(System.currentTimeMillis()) val schemaConverter = new ThriftWrapperSchemaConverterImpl val wrapperTableInfo = schemaConverter .fromExternalToWrapperTableInfo(tableInfo, dbName, tableName, storePath) wrapperTableInfo .setMetaDataFilepath(CarbonTablePath.getFolderContainingFile(schemaFilePath)) wrapperTableInfo.setStorePath(storePath) updateMetadataByWrapperTable(wrapperTableInfo) } /** * Shows all tables for given schema. 
*/ def getTables(databaseName: Option[String])(sqlContext: SQLContext): Seq[(String, Boolean)] = { val dbName = databaseName.getOrElse(sqlContext.asInstanceOf[HiveContext].catalog.client.currentDatabase) checkSchemasModifiedTimeAndReloadTables() metadata.tablesMeta.filter { c => c.carbonTableIdentifier.getDatabaseName.equalsIgnoreCase(dbName) }.map { c => (c.carbonTableIdentifier.getTableName, false) } } def isTablePathExists(tableIdentifier: TableIdentifier)(sqlContext: SQLContext): Boolean = { val dbName = tableIdentifier.database.getOrElse(getDB.getDatabaseName(None, sqlContext)) val tableName = tableIdentifier.table val tablePath = CarbonStorePath.getCarbonTablePath(this.storePath, new CarbonTableIdentifier(dbName, tableName, "")).getPath val fileType = FileFactory.getFileType(tablePath) FileFactory.isFileExist(tablePath, fileType) } def dropTable(tableStorePath: String, tableIdentifier: TableIdentifier) (sqlContext: SQLContext) { val dbName = tableIdentifier.database.get val tableName = tableIdentifier.table val metadataFilePath = CarbonStorePath.getCarbonTablePath(tableStorePath, new CarbonTableIdentifier(dbName, tableName, "")).getMetadataDirectoryPath val fileType = FileFactory.getFileType(metadataFilePath) if (FileFactory.isFileExist(metadataFilePath, fileType)) { // while drop we should refresh the schema modified time so that if any thing has changed // in the other beeline need to update. checkSchemasModifiedTimeAndReloadTables val file = FileFactory.getCarbonFile(metadataFilePath, fileType) CarbonUtil.deleteFoldersAndFilesSilent(file.getParentFile) val metadataToBeRemoved: Option[TableMeta] = getTableFromMetadata(dbName, tableIdentifier.table) metadataToBeRemoved match { case Some(tableMeta) => metadata.tablesMeta -= tableMeta CarbonMetadata.getInstance.removeTable(dbName + "_" + tableName) CarbonMetadata.getInstance.removeTable(dbName + "_" + tableName) updateSchemasUpdatedTime(touchSchemaFileSystemTime(dbName, tableName)) case None => logInfo(s"Metadata does not contain entry for table $tableName in database $dbName") } CarbonHiveMetadataUtil.invalidateAndDropTable(dbName, tableName, sqlContext) // discard cached table info in cachedDataSourceTables sqlContext.catalog.refreshTable(tableIdentifier) } } private def getTimestampFileAndType(databaseName: String, tableName: String) = { val timestampFile = storePath + "/" + CarbonCommonConstants.SCHEMAS_MODIFIED_TIME_FILE val timestampFileType = FileFactory.getFileType(timestampFile) (timestampFile, timestampFileType) } /** * This method will put the updated timestamp of schema file in the table modified time store map * * @param timeStamp */ def updateSchemasUpdatedTime(timeStamp: Long) { tableModifiedTimeStore.put("default", timeStamp) } /** * This method will read the timestamp of empty schema file * * @param databaseName * @param tableName * @return */ def readSchemaFileSystemTime(databaseName: String, tableName: String): Long = { val (timestampFile, timestampFileType) = getTimestampFileAndType(databaseName, tableName) if (FileFactory.isFileExist(timestampFile, timestampFileType)) { FileFactory.getCarbonFile(timestampFile, timestampFileType).getLastModifiedTime } else { System.currentTimeMillis() } } /** * This method will check and create an empty schema timestamp file * * @param databaseName * @param tableName * @return */ def touchSchemaFileSystemTime(databaseName: String, tableName: String): Long = { val (timestampFile, timestampFileType) = getTimestampFileAndType(databaseName, tableName) if 
(!FileFactory.isFileExist(timestampFile, timestampFileType)) { LOGGER.audit(s"Creating timestamp file for $databaseName.$tableName") FileFactory.createNewFile(timestampFile, timestampFileType) } val systemTime = System.currentTimeMillis() FileFactory.getCarbonFile(timestampFile, timestampFileType) .setLastModifiedTime(systemTime) systemTime } def checkSchemasModifiedTimeAndReloadTables() { val (timestampFile, timestampFileType) = getTimestampFileAndType("", "") if (FileFactory.isFileExist(timestampFile, timestampFileType)) { if (!(FileFactory.getCarbonFile(timestampFile, timestampFileType). getLastModifiedTime == tableModifiedTimeStore.get(CarbonCommonConstants.DATABASE_DEFAULT_NAME))) { refreshCache() } } } def refreshCache() { metadata.tablesMeta = loadMetadata(storePath).tablesMeta } def getSchemaLastUpdatedTime(databaseName: String, tableName: String): Long = { var schemaLastUpdatedTime = System.currentTimeMillis val (timestampFile, timestampFileType) = getTimestampFileAndType(databaseName, tableName) if (FileFactory.isFileExist(timestampFile, timestampFileType)) { schemaLastUpdatedTime = FileFactory.getCarbonFile(timestampFile, timestampFileType) .getLastModifiedTime } schemaLastUpdatedTime } def createDatabaseDirectory(dbName: String) { val databasePath = storePath + File.separator + dbName val fileType = FileFactory.getFileType(databasePath) FileFactory.mkdirs(databasePath, fileType) } def dropDatabaseDirectory(dbName: String) { val databasePath = storePath + File.separator + dbName val fileType = FileFactory.getFileType(databasePath) if (FileFactory.isFileExist(databasePath, fileType)) { val dbPath = FileFactory.getCarbonFile(databasePath, fileType) CarbonUtil.deleteFoldersAndFiles(dbPath) } } } object CarbonMetastoreTypes extends RegexParsers { protected lazy val primitiveType: Parser[DataType] = "string" ^^^ StringType | "float" ^^^ FloatType | "int" ^^^ IntegerType | "tinyint" ^^^ ShortType | "short" ^^^ ShortType | "double" ^^^ DoubleType | "long" ^^^ LongType | "binary" ^^^ BinaryType | "boolean" ^^^ BooleanType | fixedDecimalType | "decimal" ^^^ "decimal" ^^^ DecimalType(18, 2) | "varchar\\\\((\\\\d+)\\\\)".r ^^^ StringType | "timestamp" ^^^ TimestampType | "date" ^^^ DateType | "char\\\\((\\\\d+)\\\\)".r ^^^ StringType protected lazy val fixedDecimalType: Parser[DataType] = "decimal" ~> "(" ~> "^[1-9]\\\\d*".r ~ ("," ~> "^[0-9]\\\\d*".r <~ ")") ^^ { case precision ~ scale => DecimalType(precision.toInt, scale.toInt) } protected lazy val arrayType: Parser[DataType] = "array" ~> "<" ~> dataType <~ ">" ^^ { case tpe => ArrayType(tpe) } protected lazy val mapType: Parser[DataType] = "map" ~> "<" ~> dataType ~ "," ~ dataType <~ ">" ^^ { case t1 ~ _ ~ t2 => MapType(t1, t2) } protected lazy val structField: Parser[StructField] = "[a-zA-Z0-9_]*".r ~ ":" ~ dataType ^^ { case name ~ _ ~ tpe => StructField(name, tpe, nullable = true) } protected lazy val structType: Parser[DataType] = "struct" ~> "<" ~> repsep(structField, ",") <~ ">" ^^ { case fields => StructType(fields) } protected lazy val dataType: Parser[DataType] = arrayType | mapType | structType | primitiveType def toDataType(metastoreType: String): DataType = { parseAll(dataType, metastoreType) match { case Success(result, _) => result case failure: NoSuccess => sys.error(s"Unsupported dataType: $metastoreType") } } def toMetastoreType(dt: DataType): String = { dt match { case ArrayType(elementType, _) => s"array<${ toMetastoreType(elementType) }>" case StructType(fields) => s"struct<${ fields.map(f => s"${ f.name }:${ 
toMetastoreType(f.dataType) }") .mkString(",") }>" case StringType => "string" case FloatType => "float" case IntegerType => "int" case ShortType => "tinyint" case DoubleType => "double" case LongType => "bigint" case BinaryType => "binary" case BooleanType => "boolean" case DecimalType() => "decimal" case DateType => "date" case TimestampType => "timestamp" } } }
shivangi1015/incubator-carbondata
integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala
Scala
apache-2.0
22,499
object P09 {
  private def packHelper[A](l: List[A], curr: List[A], out: List[List[A]]): List[List[A]] = (l, curr) match {
    case (Nil, _) => curr :: out
    case (h :: tail, _) =>
      if (h == curr.head) packHelper[A](tail, h :: curr, out)
      else packHelper[A](tail, List(h), curr :: out)
  }

  def pack[A](l: List[A]): List[List[A]] = l match {
    case Nil       => Nil
    case h :: tail => packHelper(tail, List(h), Nil).reverse
  }

  def pack2[A](l: List[A]): List[List[A]] = l match {
    case Nil => List(List())
    case _ =>
      val (packed, next) = l span { _ == l.head }
      if (next == Nil) List(packed)
      else packed :: pack2(next)
  }
}
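A short usage sketch for the two pack implementations above; the input values are chosen purely for illustration.

// Both variants group consecutive duplicates into sublists.
object P09Demo extends App {
  val xs = List('a', 'a', 'a', 'b', 'c', 'c')
  println(P09.pack(xs))  // List(List(a, a, a), List(b), List(c, c))
  println(P09.pack2(xs)) // List(List(a, a, a), List(b), List(c, c))
}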
liefswanson/S-99
src/main/scala/09.scala
Scala
gpl-2.0
684
package org.precompiler.spark101.utils

import com.typesafe.config.{Config, ConfigFactory}

/**
 * @author Richard Li
 */
trait ParameterizedApp {

  def loadProperties(path: String = "application.properties"): Config = {
    ConfigFactory.load(path)
  }
}
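A minimal sketch of mixing the trait above into an application. The object name and the spark.app.name key are hypothetical, and an application.properties file is assumed to be on the classpath.

import org.precompiler.spark101.utils.ParameterizedApp

object ConfigDemo extends App with ParameterizedApp {
  // Loads application.properties from the classpath via Typesafe Config.
  val config = loadProperties()
  if (config.hasPath("spark.app.name")) {
    println(config.getString("spark.app.name"))
  }
}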
precompiler/spark-101
learning-spark/src/main/scala/org/precompiler/spark101/utils/ParameterizedApp.scala
Scala
apache-2.0
262
package com.github.gdefacci.briscola.presentation.player

import org.obl.raz.Path
import com.github.gdefacci.briscola.presentation.ADT

object PlayerEventKind extends Enumeration {
  val playerLogOn, playerLogOff = Value
}

sealed trait PlayerEvent extends ADT[PlayerEventKind.type] {
  def kind: PlayerEventKind.Value
}

final case class PlayerLogOn(player: Path) extends PlayerEvent {
  lazy val kind = PlayerEventKind.playerLogOn
}

final case class PlayerLogOff(player: Path) extends PlayerEvent {
  // log-off events report the playerLogOff kind
  lazy val kind = PlayerEventKind.playerLogOff
}
gdefacci/briscola
ddd-briscola-web/src/main/scala/com/github/gdefacci/briscola/presentation/player/playerEvents.scala
Scala
bsd-3-clause
550
/* * Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see http://www.gnu.org/licenses/agpl.html. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package lancet package analysis class TestAnalysis6 extends FileDiffSuite { val prefix = "test-out/test-analysis-6" /* logic programming with constraints */ object Test1 { // *** run loop def run[T](f: Exp[T] => Rel): Unit = { cstore = cstore0 varCount = varCount0 var d = 0 def printd(x: Any) = println(" "*d+x) def rec(e: () => Rel)(f: () => Unit): Unit = { //printd("rec: "+e) if (d == 2000) { printd("ABORT depth "+d) return } val d1 = d val save = cstore d += 1 val r = e() match { case Or(a,b) => rec(a)(f) rec(b)(f) case And(a,b) => rec(a) { () => if (propagate()) rec(b)(f) } case Yes => f() } cstore = save d = d1 r } def propagate(): Boolean = { // propagate constraints and look for contradictions //printd("simplify") val cnew = cstore flatMap { c1 => cstore flatMap { c2 => (c1,c2) match { case (IsEqual(Exp(a),Exp(b)), IsTerm(a1, key, args)) if a == a1 => List(IsTerm(b, key, args)) case (IsEqual(Exp(a),Exp(b)), IsTerm(b1, key, args)) if b == b1 => List(IsTerm(a, key, args)) case (IsTerm(a1, key1, args1), IsTerm(a2, key2, args2)) if a1 == a2 => if (key1 != key2 || args1.length != args2.length) return false (args1,args2).zipped map (IsEqual(_,_)) case _ => Nil }}} //cnew filterNot (cstore contains _) foreach println val cstore0 = cstore cstore = (cstore ++ cnew).distinct.sortBy(_.toString) (cstore == cstore0) || propagate() // until converged } def extract(x: Exp[Any]): String = cstore collectFirst { // extract term case IsTerm(id, key, args) if id == x.id => key+"("+args.map(extract).mkString(",")+")" } getOrElse canon(x) def canon(x: Exp[Any]): String = { // canonicalize var name val id = (x.id::(cstore collect { case IsEqual(`x`,y) if y.id < x.id => y.id case IsEqual(y,`x`) if y.id < x.id => y.id })).min "x"+id } val q = fresh[T] rec(() => f(q)){() => if (propagate()) { //printd("success!") //printd(eval(q)) //cstore foreach { c => printd(" "+c)} println(extract(q)) } } println("----") } // *** terms and constraints case class Exp[+T](id: Int) val varCount0 = 0 var varCount = varCount0 def fresh[T] = Exp[T] { varCount += 1; varCount - 1 } abstract class Constraint case class IsTerm(id: Int, key: String, args: List[Exp[Any]]) extends Constraint case class IsEqual(x: Exp[Any], y: Exp[Any]) extends Constraint abstract class Rel case class Or(x: () => Rel, y: () => Rel) extends Rel case class And(x: () => Rel, y: () => Rel) extends Rel case object Yes extends Rel val cstore0: List[Constraint] = Nil var cstore: List[Constraint] = cstore0 def register(c: Constraint): Unit = { cstore = c::cstore // start simplify right here? 
} def term[T](key: String, args: List[Exp[Any]]): Exp[T] = { val id = fresh[T] val c = IsTerm(id.id, key, args) register(c) id } def exists[T](f: Exp[T] => Rel): Rel = { f(fresh[T]) } def exists[T,U](f: (Exp[T],Exp[U]) => Rel): Rel = { f(fresh[T],fresh[U]) } def exists[T,U,V](f: (Exp[T],Exp[U],Exp[V]) => Rel): Rel = { f(fresh[T],fresh[U],fresh[V]) } def infix_===[T](a: => Exp[T], b: => Exp[T]): Rel = { val c = IsEqual(a,b) register(c) Yes } def infix_&&(a: => Rel, b: => Rel): Rel = { And(() => a,() => b) } def infix_||(a: => Rel, b: => Rel): Rel = { Or(() => a,() => b) } } // *** test def testA = withOutFileChecked(prefix+"A") { import Test1._ def list(xs: String*): Exp[List[String]] = if (xs.isEmpty) nil else cons(term(xs.head,Nil),list(xs.tail:_*)) def cons[T](hd: Exp[T], tl: Exp[List[T]]): Exp[List[T]] = term("cons",List(hd,tl)) def nil: Exp[List[Nothing]] = term("nil",List()) def pair[A,B](a: Exp[A], b: Exp[B]): Exp[(A,B)] = term("pair",List(a,b)) object Cons { def unapply[T](x: Exp[List[T]]): Some[(Exp[T],Exp[List[T]])] = { val h = fresh[T] val t = fresh[List[T]] x === cons(h,t) Some((h,t)) } } object Pair { def unapply[A,B](x: Exp[(A,B)]): Some[(Exp[A],Exp[B])] = { val a = fresh[A] val b = fresh[B] x === pair(a,b) Some((a,b)) } } def append[T](as: Exp[List[T]], bs: Exp[List[T]], cs: Exp[List[T]]): Rel = (as === nil && bs === cs) || exists[T,List[T],List[T]] { (h,t1,t2) => (as === cons(h,t1)) && (cs === cons(h,t2)) && append(t1,bs,t2) } Test1.run[List[String]] { q => append(list("a","b","c"), list("d","e","f"), q) } Test1.run[List[String]] { q => append(list("a","b","c"), q, list("a","b","c","d","e","f")) } Test1.run[List[String]] { q => append(q, list("d","e","f"), list("a","b","c","d","e","f")) } Test1.run[(List[String],List[String])] { q => val q1,q2 = fresh[List[String]] (q === pair(q1,q2)) && append(q1, q2, list("a","b","c","d","e","f")) } Test1.run[(List[String],List[String])] { case Pair(q1,q2) => append(q1, q2, list("a","b","c","d","e","f")) } Test1.run[(List[String],List[String])] { case Pair(q1,q2) => q1 === q2 } } }
TiarkRompf/lancet
src/test/scala/lancet/analysis/test6.scala
Scala
agpl-3.0
6,743
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.Vector

/**
 * Class that represents an instance of weighted data point with label and features.
 *
 * @param label Label for this data point.
 * @param weight The weight of this instance.
 * @param features The vector of features for this data point.
 */
private[ml] case class Instance(label: Double, weight: Double, features: Vector)
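A brief construction sketch for the case class above. The values are illustrative, and the demo object is hypothetical; it sits in the same package because Instance is private[ml].

package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.Vectors

private[ml] object InstanceDemo {
  // A weighted, labelled data point: label 1.0, weight 2.0, three dense features.
  val positive: Instance = Instance(label = 1.0, weight = 2.0, features = Vectors.dense(0.5, -1.3, 4.2))
}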
Panos-Bletsos/spark-cost-model-optimizer
mllib/src/main/scala/org/apache/spark/ml/feature/Instance.scala
Scala
apache-2.0
1,210
/** * @author Victor Caballero (vicaba) * @author Xavier Domingo (xadobu) */ package actors.node import actors._ import akka.actor.ActorRef import net.message.{ContentMessage, ControlMessage, DiscoveryAndLookupMessage, Message} import transaction.{ReadOnlyTransaction, ReadWriteTransaction, Transaction} import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.util.Random /** * Message type for adding a new node to the onion system * @param node the node identity * @param idPart the partition where the node should be placed (logical place) * @param mode the mode of the node (write or read) */ case class AddNode(node: ActorRef, idPart: Int, mode: Int) extends DiscoveryAndLookupMessage /** * AddNode companion object */ object AddNode { val WriteMode = 1 val ReadMode = 0 } case class PartitionUpdate(idPart: Int) extends DiscoveryAndLookupMessage /** * This actor manages all new nodes and connections between them. */ class CDNMaster extends ComposableActor { /** * List of partitions */ private val partitions = new mutable.HashMap[Int, Partition]() /** * List of pending connections between nodes. */ private val pendingConnections = new mutable.HashMap[Double, ListBuffer[ActorRef]]() private var manageConnectionId: Double = 0 /** * @return the next connection id to be used */ private def nextManageConnectionId(): Double = { manageConnectionId += 1 manageConnectionId } /** * Adds a partition to the list of partitions. * @param idPart the id of the partition * @return */ private def addPartition(idPart: Int): Partition = { val p = new Partition partitions += idPart -> new Partition p } /** * The CDNMaster vector clock */ private val vectorClock = VectorClock(VectorClock.Zero) receiveBuilder += { // If the actor receives a message... case msg: Message => vectorClock inc() msg match { case contentMsg: ContentMessage => vectorClock updateWith contentMsg.vectorClock contentMsg.content match { case transaction: Transaction => transaction match { case ReadOnlyTransaction(id, operations, idPart) => sendReadOnlyTransaction(ReadOnlyTransaction(id, operations, idPart)) case ReadWriteTransaction(id, operations, idPart) => sendReadWriteTransaction(ReadWriteTransaction(id, operations, idPart)) } } case controlMsg: ControlMessage => controlMsg match { //case TransactionReceived(transactionId) => doWhenTransactionReceivedACK(transactionId) case VectorClockUpdate(vc) => vectorClock updateWith vc case PartitionUpdate(idPart) => setWriteOperation(idPart) case dMsg: DiscoveryAndLookupMessage => dMsg match { case AddNode(node, idPart, mode) => // Get the partition if exists, if not, create one val partition = partitions.getOrElse(idPart, { val p = new Partition partitions += idPart -> p p }) val readList = partition.readList readList.contains(node) match { case false => readList += node case true => println("Error: Duplicated node in readList") } val writeList = partition.writeList writeList.contains(node) match { case false => mode match { case AddNode.WriteMode => writeList += node case AddNode.ReadMode => println("Node on read mode") } case true => println("Error: Duplicated node in writeList") } } case RequestNodeConnection(node1, node2) => val id = nextManageConnectionId() pendingConnections += id -> new ListBuffer[ActorRef] node1._1 ! RequestNewConnection(id, node1._2, node1._3) node2._1 ! 
RequestNewConnection(id, node2._2, node2._3) case ResponseConnectionCreated(id, port) => pendingConnections.get(id) match { case None => println("Something goes wrong with the link creation") case Some(list) => list.+=(port) if (list.size == 2) { val port1 = list.head val port2 = list.last port1 ! NewNode(port2) port2 ! NewNode(port1) } } case TransactionReceived(id) => println("Transaction " + id + ", job done!") } } } /** * Send a transaction to a read partition * @param transaction the transaction */ private def sendReadOnlyTransaction(transaction: ReadOnlyTransaction): Unit = { val idPart = transaction.idPart partitions.get(idPart) match { case None => println("Error: no such partition " + idPart) case Some(partition) => val keys = partition.readList val node = keys.toVector(Random.nextInt(keys.size)) // Send the message node ! ContentMessage(VectorClock.Zero, transaction) } } /** * Sends a read/write transaction to a partition * @param transaction */ private def sendReadWriteTransaction(transaction: ReadWriteTransaction): Unit = { val idPart = transaction.idPart partitions.get(idPart) match { case None => println("Error: no such partition " + idPart) case Some(partition) => partition.writeOperation match { case true => val keys = partition.writeList val node = keys.toVector(Random.nextInt(keys.size)) // Send the message node ! ContentMessage(VectorClock.Zero, transaction) println("Transaction " + transaction.id + " sent to " + node.path.name + " from partition " + idPart) case false => println("Error: partition " + idPart + " not writable") } } } /** * Sets the partition with idPart to be writable. If the partition does not exist a message is printed in the screen * @param idPart the partition id */ def setWriteOperation(idPart: Int): Unit = { partitions.get(idPart) match { case None => println("Error: no such partition " + idPart) case Some(partition) => { partition.writeOperation = true // println("Partition " + idPart + " is now writable") } } } /** * A partition containing read and write nodes. A partition can be writable or not. If a partition can be writable, * the client can write in any node of the partition. */ private class Partition { lazy val readList = new mutable.HashSet[ActorRef]() lazy val writeList = new mutable.HashSet[ActorRef]() var writeOperation = false } }
vicobu/DistributedSystem
src/main/scala/actors/node/CDNMaster.scala
Scala
mit
7,071
package de.sciss.fscape
package tests

import de.sciss.fscape.Ops._

object RotateWindowTest extends App {
  val g = Graph {
    import graph._
    def sin = SinOsc(1.0 / 8).take(16384)
    val rL  = RotateWindow(sin, size = 24, amount = -2)
    val rR  = RotateWindow(sin, size = 24, amount = +2)
    Plot1D(sin, 72, "sin")
    Plot1D(rL , 72, "rL")
    Plot1D(rR , 72, "rR")
  }

  val ctrl = stream.Control()
  ctrl.run(g)
  import ctrl.config.executionContext
  ctrl.status.foreach { _ => sys.exit() }
}
Sciss/FScape-next
core/jvm/src/test/scala/de/sciss/fscape/tests/RotateWindowTest.scala
Scala
agpl-3.0
516
package osgifelix

import aQute.bnd.version.Version
import sbt._
import sbt.librarymanagement.DependencyFilter

/**
 * Created by jolz on 13/08/15.
 */
trait InstructionFilters {

  def rewrite(moduleName: String, imports: String = "*", exports: String = "*;version=VERSION"): InstructionFilter =
    rewriteFilter(moduleName, moduleFilter(name = moduleName), imports, exports)

  def rewriteCustom(moduleName: String, insts: ManifestInstructions): InstructionFilter =
    RewriteFilter(moduleName, moduleFilter(name = moduleName), insts)

  def rewriteFilter(filterName: String, filter: DependencyFilter, imports: String = "*",
                    exports: String = "*;version=VERSION"): InstructionFilter =
    RewriteFilter(filterName, filter, ManifestInstructions(imports = imports, exports = exports))

  def ignore(moduleName: String) = IgnoreFilter(moduleName, moduleFilter(name = moduleName))

  def ignoreAll(firstModule: String, next: String*) =
    IgnoreFilter(firstModule, moduleFilter(name = next.foldLeft[NameFilter](firstModule)((b, n) => b | n)))

  def ignoreCustom(name: String, filter: DependencyFilter) = IgnoreFilter(name, filter)

  def create(moduleNames: NameFilter, symbolicName: String, version: String, imports: String = "*",
             exports: String = "*;version=VERSION") =
    CreateFilter(symbolicName, moduleFilter(name = moduleNames), symbolicName, new Version(version),
      ManifestInstructions(imports = imports, exports = exports), false)

  def createCustom(moduleNames: NameFilter, symbolicName: String, version: String,
                   processDefault: Boolean = false, instructions: ManifestInstructions) =
    CreateFilter(symbolicName, moduleFilter(name = moduleNames), symbolicName, new Version(version),
      instructions, processDefault)
}

sealed trait InstructionFilter {
  def filter: DependencyFilter
  def filterName: String
}

case class RewriteFilter(filterName: String, filter: DependencyFilter, instructions: ManifestInstructions,
                         name: Option[String] = None, version: Option[Version] = None) extends InstructionFilter

case class CreateFilter(filterName: String, filter: DependencyFilter, name: String, version: Version,
                        instructions: ManifestInstructions, processDefault: Boolean) extends InstructionFilter

case class IgnoreFilter(filterName: String, filter: DependencyFilter) extends InstructionFilter
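A small sketch of declaring filters with the trait above from an sbt build. The object name, module names, and export header are hypothetical; only the rewrite and ignoreAll helpers defined above are used.

import osgifelix.{InstructionFilter, InstructionFilters}

object ExampleFilters extends InstructionFilters {
  val filters: Seq[InstructionFilter] = Seq(
    // Re-wrap a plain jar as a bundle using the default import/export headers.
    rewrite("commons-logging"),
    // Same, but with an explicit export pattern.
    rewrite("jdom", exports = "org.jdom.*;version=VERSION"),
    // Leave these modules out of the OSGi resolution entirely.
    ignoreAll("slf4j-api", "slf4j-simple")
  )
}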
doolse/sbt-osgi-felix
src/main/scala/osgifelix/InstructionFilter.scala
Scala
apache-2.0
2,300
/*
 * Copyright (c) 2014-2021 by The Monix Project Developers.
 * See the project homepage at: https://monix.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package monix.execution.cancelables

import minitest.SimpleTestSuite

object AssignableCancelableSuite extends SimpleTestSuite {
  test("AssignableCancelable.multi() returns a MultiAssignmentCancelable") {
    val c = AssignableCancelable.multi()
    assert(c.isInstanceOf[MultiAssignCancelable], "isInstanceOf[MultiAssignCancelable]")
  }

  test("AssignableCancelable.single() returns a SingleAssignmentCancelable") {
    val c = AssignableCancelable.single()
    assert(c.isInstanceOf[SingleAssignCancelable], "isInstanceOf[SingleAssignmentCancelable]")
  }

  test("AssignableCancelable.alreadyCanceled") {
    val c = AssignableCancelable.alreadyCanceled
    assert(c.isCanceled, "c.isCanceled")

    val b = BooleanCancelable(); c := b
    assert(b.isCanceled, "b.isCanceled")

    c.cancel()
    assert(c.isCanceled, "c.isCanceled")

    val b2 = BooleanCancelable(); c := b2
    assert(b2.isCanceled, "b2.isCanceled")
  }

  test("AssignableCancelable.dummy") {
    val c = AssignableCancelable.dummy

    val b = BooleanCancelable(); c := b
    assert(!b.isCanceled, "!b.isCanceled")

    c.cancel()

    val b2 = BooleanCancelable(); c := b2
    assert(!b2.isCanceled, "!b2.isCanceled")
  }
}
monixio/monix
monix-execution/shared/src/test/scala/monix/execution/cancelables/AssignableCancelableSuite.scala
Scala
apache-2.0
1,876
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.spark.load import java.util.Comparator import scala.collection.JavaConverters._ import scala.collection.mutable import scala.reflect.classTag import org.apache.hadoop.conf.Configuration import org.apache.hadoop.mapreduce.InputSplit import org.apache.spark.{CarbonInputMetrics, DataSkewRangePartitioner, TaskContext} import org.apache.spark.broadcast.Broadcast import org.apache.spark.rdd.RDD import org.apache.spark.sql.{DataFrame, SparkSession} import org.apache.spark.sql.catalyst.expressions.GenericInternalRow import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.execution.command.ExecutionErrors import org.apache.spark.sql.util.{SparkSQLUtil, SparkTypeConverter} import org.apache.spark.storage.StorageLevel import org.apache.spark.unsafe.types.UTF8String import org.apache.spark.util.LongAccumulator import org.apache.carbondata.common.logging.LogServiceFactory import org.apache.carbondata.converter.SparkDataTypeConverterImpl import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.datastore.row.CarbonRow import org.apache.carbondata.core.metadata.datatype.{DataType, DataTypes, StructField, StructType} import org.apache.carbondata.core.metadata.schema.table.CarbonTable import org.apache.carbondata.core.metadata.schema.table.column.{CarbonColumn, CarbonDimension} import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus} import org.apache.carbondata.core.util._ import org.apache.carbondata.core.util.ByteUtil.UnsafeComparer import org.apache.carbondata.hadoop.CarbonProjection import org.apache.carbondata.hadoop.api.CarbonTableOutputFormat import org.apache.carbondata.processing.datatypes.GenericDataType import org.apache.carbondata.processing.loading.{CarbonDataLoadConfiguration, DataField, DataLoadProcessBuilder, FailureCauses} import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat import org.apache.carbondata.processing.loading.model.CarbonLoadModel import org.apache.carbondata.processing.sort.sortdata.{NewRowComparator, NewRowComparatorForNormalDims, SortParameters} import org.apache.carbondata.processing.util.{CarbonDataProcessorUtil, TableOptionConstant} import org.apache.carbondata.spark.rdd.{CarbonScanRDD, StringArrayRow} import org.apache.carbondata.spark.util.{CommonUtil, Util} import org.apache.carbondata.store.CarbonRowReadSupport /** * Use sortBy operator in spark to load the data */ object DataLoadProcessBuilderOnSpark { private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) def loadDataUsingGlobalSort( sparkSession: SparkSession, dataFrame: Option[DataFrame], model: 
CarbonLoadModel, hadoopConf: Configuration): Array[(String, (LoadMetadataDetails, ExecutionErrors))] = { var isLoadFromCSV = false val originRDD = if (dataFrame.isDefined) { dataFrame.get.rdd } else { // input data from files isLoadFromCSV = true val columnCount = model.getCsvHeaderColumns.length CsvRDDHelper.csvFileScanRDD(sparkSession, model, hadoopConf) .map(DataLoadProcessorStepOnSpark.toStringArrayRow(_, columnCount)) } val sc = sparkSession.sparkContext val modelBroadcast = sc.broadcast(model) val partialSuccessAccum = sc.longAccumulator("Partial Success Accumulator") val inputStepRowCounter = sc.longAccumulator("Input Processor Accumulator") val convertStepRowCounter = sc.longAccumulator("Convert Processor Accumulator") val sortStepRowCounter = sc.longAccumulator("Sort Processor Accumulator") val writeStepRowCounter = sc.longAccumulator("Write Processor Accumulator") hadoopConf .set(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, sparkSession.sparkContext.appName) val conf = SparkSQLUtil.broadCastHadoopConf(sc, hadoopConf) // 1. Input val inputRDD = if (isLoadFromCSV) { // No need of wrap with NewRDDIterator, which converts object to string, // as it is already a string. // So, this will avoid new object creation in case of CSV global sort load for each row originRDD.mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark.inputFuncForCsvRows( rows.asInstanceOf[Iterator[StringArrayRow]], index, modelBroadcast, inputStepRowCounter) } } else { originRDD .mapPartitions(rows => DataLoadProcessorStepOnSpark.toRDDIterator(rows, modelBroadcast)) .mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark.inputFunc(rows, index, modelBroadcast, inputStepRowCounter) } } // 2. Convert val convertRDD = inputRDD.mapPartitionsWithIndex { case (index, rows) => ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf.value.value) DataLoadProcessorStepOnSpark.convertFunc(rows, index, modelBroadcast, partialSuccessAccum, convertStepRowCounter) }.filter(_ != null) // Filter the bad record // 3. Sort val configuration = DataLoadProcessBuilder.createConfiguration(model) val sortParameters = SortParameters.createSortParameters(configuration) val rowComparator: Comparator[Array[AnyRef]] = if (sortParameters.getNoDictionaryCount > 0) { new NewRowComparator(sortParameters.getNoDictionarySortColumn, sortParameters.getNoDictDataType) } else { new NewRowComparatorForNormalDims(sortParameters.getDimColCount) } object RowOrdering extends Ordering[Array[AnyRef]] { def compare(rowA: Array[AnyRef], rowB: Array[AnyRef]): Int = { rowComparator.compare(rowA, rowB) } } var numPartitions = CarbonDataProcessorUtil.getGlobalSortPartitions( configuration.getDataLoadProperty(CarbonCommonConstants.LOAD_GLOBAL_SORT_PARTITIONS)) if (numPartitions <= 0) { numPartitions = convertRDD.partitions.length } // Because if the number of partitions greater than 1, there will be action operator(sample) in // sortBy operator. So here we cache the rdd to avoid do input and convert again. if (numPartitions > 1) { convertRDD.persist(StorageLevel.fromString( CarbonProperties.getInstance().getGlobalSortRddStorageLevel())) } import scala.reflect.classTag val sortRDD = convertRDD .sortBy(_.getData, numPartitions = numPartitions)(RowOrdering, classTag[Array[AnyRef]]) .mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark.convertTo3Parts(rows, index, modelBroadcast, sortStepRowCounter) } // 4. 
Write sc.runJob(sortRDD, (context: TaskContext, rows: Iterator[CarbonRow]) => { setTaskListener() val model = modelBroadcast.value.getCopyWithTaskNo(context.partitionId.toString) DataLoadProcessorStepOnSpark.writeFunc( rows, context.partitionId, model, writeStepRowCounter, conf.value.value) }) // clean cache only if persisted and keeping unpersist non-blocking as non-blocking call will // not have any functional impact as spark automatically monitors the cache usage on each node // and drops out old data partiotions in a least-recently used (LRU) fashion. if (numPartitions > 1) { convertRDD.unpersist(false) } // Log the number of rows in each step LOGGER.info("Total rows processed in step Input Processor: " + inputStepRowCounter.value) LOGGER.info("Total rows processed in step Data Converter: " + convertStepRowCounter.value) LOGGER.info("Total rows processed in step Sort Processor: " + sortStepRowCounter.value) LOGGER.info("Total rows processed in step Data Writer: " + writeStepRowCounter.value) updateLoadStatus(model, partialSuccessAccum) } def insertDataUsingGlobalSortWithInternalRow( sparkSession: SparkSession, scanResultRDD : RDD[InternalRow], model: CarbonLoadModel, hadoopConf: Configuration): Array[(String, (LoadMetadataDetails, ExecutionErrors))] = { val originRDD = scanResultRDD val sc = sparkSession.sparkContext val modelBroadcast = sc.broadcast(model) val partialSuccessAccum = sc.longAccumulator("Partial Success Accumulator") val sortStepRowCounter = sc.longAccumulator("Sort Processor Accumulator") val writeStepRowCounter = sc.longAccumulator("Write Processor Accumulator") hadoopConf .set(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, sparkSession.sparkContext.appName) val conf = SparkSQLUtil.broadCastHadoopConf(sc, hadoopConf) val configuration = DataLoadProcessBuilder.createConfiguration(model) // 1. Convert internalRow to object array val fields = Util .convertToSparkSchemaFromColumnSchema(model.getCarbonDataLoadSchema.getCarbonTable, true) .fields .toSeq val dataTypes = fields.map(field => field.dataType) val map: mutable.Map[String, GenericDataType[_]] = mutable.Map[String, GenericDataType[_]]() CommonUtil.convertComplexDataType(map, configuration) val rdd = originRDD.map { internalRow => CommonUtil.getObjectArrayFromInternalRowAndConvertComplexTypeForGlobalSort(internalRow, fields, map) } // 2. sort var numPartitions = CarbonDataProcessorUtil.getGlobalSortPartitions( configuration.getDataLoadProperty(CarbonCommonConstants.LOAD_GLOBAL_SORT_PARTITIONS)) if (numPartitions <= 0) { numPartitions = originRDD.partitions.length } // Because if the number of partitions greater than 1, there will be action operator // (sample) in // sortBy operator. So here we cache the rdd to avoid do input and convert again. if (numPartitions > 1) { rdd.persist(StorageLevel.fromString( CarbonProperties.getInstance().getGlobalSortRddStorageLevel())) } val sortColumnsLength = model.getCarbonDataLoadSchema.getCarbonTable.getSortColumns.size() val sortColumnDataTypes = dataTypes.take(sortColumnsLength) val rowComparator = GlobalSortHelper.generateRowComparator(sortColumnDataTypes) val sortRDD = rdd.sortBy(row => getKey(row, sortColumnsLength, sortColumnDataTypes), true, numPartitions)( rowComparator, classTag[Array[AnyRef]]) val newRDD = sortRDD .mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark.convertTo3PartsFromObjectArray(rows, index, model, sortStepRowCounter) } // 3. 
Write sc.runJob(newRDD, (context: TaskContext, rows: Iterator[CarbonRow]) => { setTaskListener() val model = modelBroadcast.value.getCopyWithTaskNo(context.partitionId.toString) DataLoadProcessorStepOnSpark.writeFunc(rows, context.partitionId, model, writeStepRowCounter, conf.value.value) }) // clean cache only if persisted and keeping unpersist non-blocking as non-blocking call will // not have any functional impact as spark automatically monitors the cache usage on each node // and drops out old data partiotions in a least-recently used (LRU) fashion. if (numPartitions > 1) { rdd.unpersist(false) } // Log the number of rows in each step LOGGER.info("Total rows processed in step Sort Processor: " + sortStepRowCounter.value) LOGGER.info("Total rows processed in step Data Writer: " + writeStepRowCounter.value) updateLoadStatus(model, partialSuccessAccum) } def getKey(row: Array[AnyRef], sortColumnsLength: Int, dataTypes: Seq[org.apache.spark.sql.types.DataType]): Array[AnyRef] = { val key: Array[AnyRef] = new Array[AnyRef](sortColumnsLength) System.arraycopy(row, 0, key, 0, sortColumnsLength) key } private def updateLoadStatus(model: CarbonLoadModel, partialSuccessAccum: LongAccumulator ): Array[(String, (LoadMetadataDetails, ExecutionErrors))] = { // Update status if (partialSuccessAccum.value != 0) { val uniqueLoadStatusId = model.getTableName + CarbonCommonConstants.UNDERSCORE + "Partial_Success" val loadMetadataDetails = new LoadMetadataDetails() loadMetadataDetails.setSegmentStatus(SegmentStatus.LOAD_PARTIAL_SUCCESS) val executionErrors = new ExecutionErrors(FailureCauses.NONE, "") executionErrors.failureCauses = FailureCauses.BAD_RECORDS Array((uniqueLoadStatusId, (loadMetadataDetails, executionErrors))) } else { val uniqueLoadStatusId = model.getTableName + CarbonCommonConstants.UNDERSCORE + "Success" val loadMetadataDetails = new LoadMetadataDetails() loadMetadataDetails.setSegmentStatus(SegmentStatus.SUCCESS) val executionErrors = new ExecutionErrors(FailureCauses.NONE, "") Array((uniqueLoadStatusId, (loadMetadataDetails, executionErrors))) } } /** * 1. range partition the whole input data * 2. for each range, sort the data and writ it to CarbonData files */ def loadDataUsingRangeSort( sparkSession: SparkSession, model: CarbonLoadModel, hadoopConf: Configuration): Array[(String, (LoadMetadataDetails, ExecutionErrors))] = { // initialize and prepare row counter val sc = sparkSession.sparkContext val modelBroadcast = sc.broadcast(model) val partialSuccessAccum = sc.longAccumulator("Partial Success Accumulator") val inputStepRowCounter = sc.longAccumulator("Input Processor Accumulator") val convertStepRowCounter = sc.longAccumulator("Convert Processor Accumulator") val sortStepRowCounter = sc.longAccumulator("Sort Processor Accumulator") val writeStepRowCounter = sc.longAccumulator("Write Processor Accumulator") // 1. Input hadoopConf .set(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, sparkSession.sparkContext.appName) val inputRDD = CsvRDDHelper .csvFileScanRDD(sparkSession, model, hadoopConf) .mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark.internalInputFunc( rows, index, modelBroadcast, Option(inputStepRowCounter), Option.empty) } // 2. 
Convert val conf = SparkSQLUtil.broadCastHadoopConf(sc, hadoopConf) val convertRDD = inputRDD .mapPartitionsWithIndex { case (index, rows) => ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf.value.value) DataLoadProcessorStepOnSpark .convertFunc(rows, index, modelBroadcast, partialSuccessAccum, convertStepRowCounter) } .filter(_ != null) // 3. Range partition by range_column val configuration = DataLoadProcessBuilder.createConfiguration(model) val rangeColumnIndex = indexOfColumn(model.getRangePartitionColumn, configuration.getDataFields) // convert RDD[CarbonRow] to RDD[(rangeColumn, CarbonRow)] val keyRDD = convertRDD.keyBy(_.getObject(rangeColumnIndex)) // range partition by key val numPartitions = getNumPartitions(configuration, model, convertRDD) val objectOrdering: Ordering[Object] = createOrderingForColumn(model.getRangePartitionColumn) import scala.reflect.classTag val sampleRDD = getSampleRDD(sparkSession, model, hadoopConf, configuration, modelBroadcast) val rangeRDD = keyRDD .partitionBy( new DataSkewRangePartitioner( numPartitions, sampleRDD, false)(objectOrdering, classTag[Object])) .map(_._2) // 4. Sort and Write data sc.runJob(rangeRDD, (context: TaskContext, rows: Iterator[CarbonRow]) => { setTaskListener() DataLoadProcessorStepOnSpark.sortAndWriteFunc(rows, context.partitionId, modelBroadcast, writeStepRowCounter, conf.value.value) }) // Log the number of rows in each step LOGGER.info("Total rows processed in step Input Processor: " + inputStepRowCounter.value) LOGGER.info("Total rows processed in step Data Converter: " + convertStepRowCounter.value) LOGGER.info("Total rows processed in step Sort Processor: " + sortStepRowCounter.value) LOGGER.info("Total rows processed in step Data Writer: " + writeStepRowCounter.value) // Update status updateLoadStatus(model, partialSuccessAccum) } /** * provide RDD for sample * CSVRecordReader(univocity parser) will output only one column */ private def getSampleRDD( sparkSession: SparkSession, model: CarbonLoadModel, hadoopConf: Configuration, configuration: CarbonDataLoadConfiguration, modelBroadcast: Broadcast[CarbonLoadModel] ): RDD[(Object, Object)] = { // initialize and prepare row counter val configuration = DataLoadProcessBuilder.createConfiguration(model) val header = configuration.getHeader val rangeColumn = model.getRangePartitionColumn val rangeColumnIndex = (0 until header.length).find { index => header(index).equalsIgnoreCase(rangeColumn.getColName) }.get val rangeField = configuration .getDataFields .find(dataField => dataField.getColumn.getColName.equals(rangeColumn.getColName)) .get // 1. Input val newHadoopConf = new Configuration(hadoopConf) newHadoopConf .set(CSVInputFormat.SELECT_COLUMN_INDEX, "" + rangeColumnIndex) val inputRDD = CsvRDDHelper .csvFileScanRDD(sparkSession, model, newHadoopConf) .mapPartitionsWithIndex { case (index, rows) => DataLoadProcessorStepOnSpark .internalInputFunc(rows, index, modelBroadcast, Option.empty, Option(rangeField)) } // 2. Convert val conf = SparkSQLUtil.broadCastHadoopConf(sparkSession.sparkContext, hadoopConf) val convertRDD = inputRDD .mapPartitionsWithIndex { case (index, rows) => ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf.value.value) DataLoadProcessorStepOnSpark .sampleConvertFunc(rows, rangeField, index, modelBroadcast) } .filter(_ != null) convertRDD.map(row => (row.getObject(0), null)) } /** * calculate the number of partitions. 
*/ private def getNumPartitions( configuration: CarbonDataLoadConfiguration, model: CarbonLoadModel, convertRDD: RDD[CarbonRow] ): Int = { var numPartitions = CarbonDataProcessorUtil.getGlobalSortPartitions( configuration.getDataLoadProperty(CarbonCommonConstants.LOAD_GLOBAL_SORT_PARTITIONS)) if (numPartitions <= 0) { if (model.getTotalSize <= 0) { numPartitions = convertRDD.partitions.length } else { // calculate the number of partitions // better to generate a CarbonData file for each partition val totalSize = model.getTotalSize.toDouble val table = model.getCarbonDataLoadSchema.getCarbonTable numPartitions = getNumPatitionsBasedOnSize(totalSize, table, model, false) } } numPartitions } def getNumPatitionsBasedOnSize(totalSize: Double, table: CarbonTable, model: CarbonLoadModel, mergerFlag: Boolean): Int = { val blockSize = 1024L * 1024 * table.getBlockSizeInMB val blockletSize = 1024L * 1024 * table.getBlockletSizeInMB val scaleFactor = if (mergerFlag) { 1 } else if (model.getScaleFactor == 0) { // use system properties CarbonProperties.getInstance().getRangeColumnScaleFactor } else { model.getScaleFactor } // For Range_Column, it will try to generate one big file for each partition. // And the size of the big file is about TABLE_BLOCKSIZE of this table. val splitSize = Math.max(blockletSize, (blockSize - blockletSize)) * scaleFactor Math.ceil(totalSize / splitSize).toInt } private def indexOfColumn(column: CarbonColumn, fields: Array[DataField]): Int = { (0 until fields.length) .find(index => fields(index).getColumn.getColName.equals(column.getColName)) .get } private def createOrderingForColumn(column: CarbonColumn): Ordering[Object] = { if (column.isDimension) { val dimension = column.asInstanceOf[CarbonDimension] if (dimension.getDataType == DataTypes.DATE) { new PrimtiveOrdering(DataTypes.INT) } else { if (DataTypeUtil.isPrimitiveColumn(column.getDataType)) { new PrimtiveOrdering(column.getDataType) } else { new ByteArrayOrdering() } } } else { new PrimtiveOrdering(column.getDataType) } } def setTaskListener(): Unit = { TaskContext.get.addTaskCompletionListener { _ => CommonUtil.clearUnsafeMemory(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId) } TaskMetricsMap.initializeThreadLocal() val carbonTaskInfo = new CarbonTaskInfo carbonTaskInfo.setTaskId(CarbonUtil.generateUUID()) ThreadLocalTaskInfo.setCarbonTaskInfo(carbonTaskInfo) } /** * create CarbonLoadModel for global_sort */ def createLoadModelForGlobalSort( sparkSession: SparkSession, carbonTable: CarbonTable ): CarbonLoadModel = { val conf = SparkSQLUtil.sessionState(sparkSession).newHadoopConf() CarbonTableOutputFormat.setDatabaseName(conf, carbonTable.getDatabaseName) CarbonTableOutputFormat.setTableName(conf, carbonTable.getTableName) CarbonTableOutputFormat.setCarbonTable(conf, carbonTable) val fieldList = carbonTable.getCreateOrderColumn .asScala .map { column => new StructField(column.getColName, column.getDataType) } CarbonTableOutputFormat.setInputSchema(conf, new StructType(fieldList.asJava)) val loadModel = CarbonTableOutputFormat.getLoadModel(conf) loadModel.setSerializationNullFormat( TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName + ",\\\\N") loadModel.setBadRecordsLoggerEnable( TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName + ",false") loadModel.setBadRecordsAction( TableOptionConstant.BAD_RECORDS_ACTION.getName + ",force") loadModel.setIsEmptyDataBadRecord( DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD + ",false") val globalSortPartitions = 
carbonTable.getTableInfo.getFactTable.getTableProperties.get("global_sort_partitions") if (globalSortPartitions != null) { loadModel.setGlobalSortPartitions(globalSortPartitions) } loadModel } /** * create DataFrame basing on specified splits */ def createInputDataFrame( sparkSession: SparkSession, carbonTable: CarbonTable, splits: Seq[InputSplit] ): DataFrame = { val columns = carbonTable .getCreateOrderColumn .asScala .map(_.getColName) .toArray val schema = SparkTypeConverter.createSparkSchema(carbonTable, columns) val rdd: RDD[InternalRow] = new CarbonScanRDD[CarbonRow]( sparkSession, columnProjection = new CarbonProjection(columns), null, carbonTable.getAbsoluteTableIdentifier, carbonTable.getTableInfo.serialize, carbonTable.getTableInfo, new CarbonInputMetrics, null, classOf[SparkDataTypeConverterImpl], classOf[CarbonRowReadSupport], splits.asJava) .map { row => new GenericInternalRow(row.getData.asInstanceOf[Array[Any]]) } SparkSQLUtil.execute(rdd, schema, sparkSession) } } class PrimtiveOrdering(dataType: DataType) extends Ordering[Object] { val comparator = org.apache.carbondata.core.util.comparator.Comparator .getComparator(dataType) override def compare(x: Object, y: Object): Int = { comparator.compare(x, y) } } class ByteArrayOrdering() extends Ordering[Object] { override def compare(x: Object, y: Object): Int = { UnsafeComparer.INSTANCE.compareTo(x.asInstanceOf[Array[Byte]], y.asInstanceOf[Array[Byte]]) } } class StringOrdering() extends Ordering[Object] { override def compare(x: Object, y: Object): Int = { if (x == null) { return -1 } else if (y == null) { return 1 } return (x.asInstanceOf[UTF8String]).compare(y.asInstanceOf[UTF8String]) } }
jackylk/incubator-carbondata
integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala
Scala
apache-2.0
24,714
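The range-sort path above sizes its partitions with the formula in getNumPatitionsBasedOnSize: splitSize = max(blockletSize, blockSize - blockletSize) * scaleFactor, then numPartitions = ceil(totalSize / splitSize). A minimal, dependency-free sketch of that arithmetic, using made-up sizes rather than real CarbonData table properties:

object RangePartitionCountSketch {
  // mirrors the formula above: aim for roughly one block-sized CarbonData file
  // per range partition, scaled by the range-column scale factor
  def numPartitions(totalSizeBytes: Double,
                    blockSizeMB: Long,
                    blockletSizeMB: Long,
                    scaleFactor: Int): Int = {
    val blockSize = 1024L * 1024 * blockSizeMB
    val blockletSize = 1024L * 1024 * blockletSizeMB
    val splitSize = Math.max(blockletSize, blockSize - blockletSize) * scaleFactor
    Math.ceil(totalSizeBytes / splitSize).toInt
  }

  def main(args: Array[String]): Unit = {
    // illustrative numbers only: a 10 GB load, 1024 MB blocks, 64 MB blocklets,
    // scale factor 3 -> ceil(10 GB / (960 MB * 3)) = 4 partitions
    println(numPartitions(10L * 1024 * 1024 * 1024, 1024, 64, 3))
  }
}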
package com.komanov.junk.mockito import org.specs2.matcher.{Matcher, Matchers} import org.specs2.mock.Mockito import org.specs2.mutable.SpecificationWithJUnit import org.specs2.specification.Scope class MockitoAndMutableTest extends SpecificationWithJUnit with Mockito { "doBusiness" should { "replace 'a' with 'ab'" in new ctx { override def makeDao = mock[ContainerDao] manager.doBusiness(container) container must beModifiedContainer got { one(dao).update(beModifiedContainer) noMoreCallsTo(dao) } } "replace 'a' with 'ab' in a JMock way" in new ctx { override def makeDao = mock[ContainerDao].defaultAnswer(i => throw new IllegalStateException(s"Unexpected call: $i")) doAnswer(_ => {}).when(dao).update(beModifiedContainer) manager.doBusiness(container) container must beModifiedContainer got { one(dao).update(any) noMoreCallsTo(dao) } }.pendingUntilFixed("This test is correct - it catches a bug in a code. It demonstrates the threat of Mockito -- previous test doesn't check correctness") } trait ctx extends Scope { def makeDao: ContainerDao lazy val dao = makeDao lazy val manager = new ContainerManager(dao) val container = MutableContainer(Seq("b", "a")) def beModifiedContainer = MutableContainerMatchers.isMutableContainer(list = be_===(Seq("b", "ab"))) } } case class MutableContainer(var list: Seq[String]) object MutableMutator { def mutate(c: MutableContainer)(pf: PartialFunction[String, String]): Unit = { c.list = c.list.map(s => if (pf.isDefinedAt(s)) pf(s) else s) } } trait ContainerDao { def update(c: MutableContainer): Unit } class ContainerManager(dao: ContainerDao) { def doBusiness(c: MutableContainer): Unit = { MutableMutator.mutate(c) { case s@"a" => val newValue = s + "b" dao.update(c) newValue } } } class MutableContainerMatchers extends Matchers { object isMutableContainer { def apply(list: Matcher[Seq[String]]): Matcher[MutableContainer] = { list ^^ { (_: MutableContainer).list aka "list" } } } } object MutableContainerMatchers extends MutableContainerMatchers
dkomanov/stuff
src/com/komanov/junk/mockito/MockitoAndMutableTest.scala
Scala
mit
2,258
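The pending test above hints at the underlying hazard: Mockito argument matchers are evaluated after doBusiness returns, by which time the mutable container has already been reassigned, so they cannot see what the DAO was given at call time. A minimal sketch of the alternative pattern, using a hypothetical hand-rolled fake (not part of the file above, assumed to sit in the same com.komanov.junk.mockito package) that snapshots the argument when update is invoked:

class RecordingContainerDao extends ContainerDao {
  var seenLists: List[Seq[String]] = Nil
  override def update(c: MutableContainer): Unit = {
    // c.list is an immutable Seq and the var is reassigned later, so keeping
    // this reference is a faithful snapshot of what the DAO actually observed
    seenLists = seenLists :+ c.list
  }
}

// sketch of how a test body could use it (specs2 matcher syntax as in the spec above):
//   val dao = new RecordingContainerDao
//   val container = MutableContainer(Seq("b", "a"))
//   new ContainerManager(dao).doBusiness(container)
//   container.list mustEqual Seq("b", "ab")       // business logic was applied
//   dao.seenLists mustEqual List(Seq("b", "a"))   // but the DAO saw the pre-mutation list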
package com.twitter.finagle.httpproxy import org.scalatest.FunSuite import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import org.scalatest.mock.MockitoSugar import org.mockito.Mockito.{times, verify, when, atLeastOnce} import org.mockito.ArgumentCaptor import org.mockito.Matchers._ import org.jboss.netty.channel._ import com.twitter.util.RandomSocket import java.net.{SocketAddress, InetSocketAddress} import org.jboss.netty.handler.codec.http._ @RunWith(classOf[JUnitRunner]) class HttpConnectHandlerTest extends FunSuite with MockitoSugar { class HttpConnectHandlerHelper { val ctx = mock[ChannelHandlerContext] val channel = mock[Channel] when(ctx.getChannel) thenReturn channel val pipeline = mock[ChannelPipeline] when(ctx.getPipeline) thenReturn pipeline when(channel.getPipeline) thenReturn pipeline val closeFuture = Channels.future(channel) when(channel.getCloseFuture) thenReturn closeFuture val port = RandomSocket.nextPort() val remoteAddress = new InetSocketAddress("localhost", port) when(channel.getRemoteAddress) thenReturn remoteAddress val proxyAddress = mock[SocketAddress] val connectFuture = Channels.future(channel, true) val connectRequested = new DownstreamChannelStateEvent( channel, connectFuture, ChannelState.CONNECTED, remoteAddress) val ch = HttpConnectHandler.addHandler(proxyAddress, remoteAddress, pipeline) ch.handleDownstream(ctx, connectRequested) def checkDidClose() { val ec = ArgumentCaptor.forClass(classOf[DownstreamChannelStateEvent]) verify(pipeline).sendDownstream(ec.capture) val e = ec.getValue assert(e.getChannel === channel) assert(e.getFuture === closeFuture) assert(e.getState === ChannelState.OPEN) assert(e.getValue === java.lang.Boolean.FALSE) } } test("HttpConnectHandler should upon connect wrap the downstream connect request") { val h = new HttpConnectHandlerHelper import h._ val ec = ArgumentCaptor.forClass(classOf[DownstreamChannelStateEvent]) verify(ctx).sendDownstream(ec.capture) val e = ec.getValue assert(e.getChannel === channel) assert(e.getFuture != connectFuture) // this is proxied assert(e.getState === ChannelState.CONNECTED) assert(e.getValue === proxyAddress) } test("HttpConnectHandler should upon connect propagate cancellation") { val h = new HttpConnectHandlerHelper import h._ val ec = ArgumentCaptor.forClass(classOf[DownstreamChannelStateEvent]) verify(ctx).sendDownstream(ec.capture) val e = ec.getValue assert(!e.getFuture.isCancelled) connectFuture.cancel() assert(e.getFuture.isCancelled) } test("HttpConnectHandler should when connect is successful not propagate success") { val h = new HttpConnectHandlerHelper import h._ ch.handleUpstream(ctx, new UpstreamChannelStateEvent( channel, ChannelState.CONNECTED, remoteAddress)) assert(!connectFuture.isDone) verify(ctx, times(0)).sendUpstream(any[ChannelEvent]) } test("HttpConnectHandler should when connect is successful propagate connection cancellation") { val h = new HttpConnectHandlerHelper import h._ ch.handleUpstream(ctx, new UpstreamChannelStateEvent( channel, ChannelState.CONNECTED, remoteAddress)) assert(!connectFuture.isDone) verify(ctx, times(0)).sendUpstream(any[ChannelEvent]) connectFuture.cancel() checkDidClose() } test("HttpConnectHandler should when connect is successful do HTTP CONNECT") { val h = new HttpConnectHandlerHelper import h._ ch.handleUpstream(ctx, new UpstreamChannelStateEvent( channel, ChannelState.CONNECTED, remoteAddress)) assert(!connectFuture.isDone) verify(ctx, times(0)).sendUpstream(any[ChannelEvent]) { // send connect request val ec = 
ArgumentCaptor.forClass(classOf[DownstreamMessageEvent]) verify(ctx, atLeastOnce).sendDownstream(ec.capture) val e = ec.getValue val req = e.getMessage.asInstanceOf[DefaultHttpRequest] assert(req.getMethod === HttpMethod.CONNECT) assert(req.getUri === "localhost:" + port) assert(req.headers().get("Host") === "localhost:" + port) } { // when connect response is received, propagate the connect and remove the handler ch.handleUpstream(ctx, new UpstreamMessageEvent( channel, new DefaultHttpResponse(HttpVersion.HTTP_1_0, HttpResponseStatus.OK), null)) assert(connectFuture.isDone) verify(pipeline).remove(ch) // we propagated the connect val ec = ArgumentCaptor.forClass(classOf[UpstreamChannelStateEvent]) verify(ctx).sendUpstream(ec.capture) val e = ec.getValue assert(e.getChannel === channel) assert(e.getState === ChannelState.CONNECTED) assert(e.getValue === remoteAddress) } } test("HttpConnectHandler should propagate connection failure") { val h = new HttpConnectHandlerHelper import h._ val ec = ArgumentCaptor.forClass(classOf[DownstreamChannelStateEvent]) verify(ctx).sendDownstream(ec.capture) val e = ec.getValue val exc = new Exception("failed to connect") assert(!connectFuture.isDone) e.getFuture.setFailure(exc) assert(connectFuture.isDone) assert(connectFuture.getCause === exc) } }
JustinTulloss/finagle
finagle-core/src/test/scala/com/twitter/finagle/httpproxy/HttpConnectHandlerTest.scala
Scala
apache-2.0
5,368
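For reference, a small sketch of the CONNECT request shape the test asserts on: method CONNECT, URI "host:port", and a Host header carrying the same value. This is not the finagle handler itself, and HTTP/1.1 is an assumption here (the test only checks method, URI and Host), but it uses the same Netty 3 codec classes as the test:

import org.jboss.netty.handler.codec.http.{DefaultHttpRequest, HttpMethod, HttpVersion}

object ConnectRequestSketch {
  // builds the proxy handshake request that precedes tunnelling to host:port
  def connectRequest(host: String, port: Int): DefaultHttpRequest = {
    val hostAndPort = s"$host:$port"
    val req = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.CONNECT, hostAndPort)
    req.headers().set("Host", hostAndPort)
    req
  }
}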
// Copyright 2011 Twitter, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this // file except in compliance with the License. You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. package com.twitter.joauth.testhelpers import com.twitter.joauth.UrlCodec import scala.util.Random object MockRequestFactory { val random = new Random() def oAuth1Header( token: String, clientKey: String, signature: String, nonce: String, timestamp: String, urlEncodeSig: Boolean, extraHeaderParams: Seq[(String, String)] = Nil, quotedHeaderValues: Boolean = true): String = { def maybeQuote(str: String) = if (quotedHeaderValues) "\\"%s\\"".format(str) else str val encodedSignature = if (signature == null || !urlEncodeSig) signature else UrlCodec.encode(signature) val params = oAuth1ParameterMap(token, clientKey, encodedSignature, nonce, timestamp) ++ extraHeaderParams val paramString = params.filter(_._2 != null).map { case (k, v) => getRandomWhitespace + k + getRandomWhitespace + "=" + getRandomWhitespace + maybeQuote(v) + getRandomWhitespace } "OAuth " + paramString.mkString(",") } def oAuth2Header(token: String) = "Bearer %s".format(token) def oAuth1QueryString(token: String, clientKey: String, signature: String, nonce: String, timestamp: String, urlEncode: Boolean) = ParamHelper.toQueryString(oAuth1ParameterMap(token, clientKey, signature, nonce, timestamp), urlEncode) def oAuth1ParameterMap( token: String, clientKey: String, signature: String, nonce: String, timestamp: String): Seq[(String, String)] = { Seq( "oauth_token" -> token, "oauth_consumer_key" -> clientKey, "oauth_signature" -> signature, "oauth_nonce" -> nonce, "oauth_timestamp" -> timestamp, "oauth_version" -> "1.0", "oauth_signature_method" -> "HMAC-SHA1" ) } def getRandomWhitespace() = " " * random.nextInt(2) def requestWithAuthHeader(header: String): MockRequest = { val request = new MockRequest() request.authHeader = header request } def oAuth1RequestInHeader(token: String, clientKey: String, signature: String, nonce: String, timestamp: String) = requestWithAuthHeader(oAuth1Header(token, clientKey, signature, nonce, timestamp, true)) def oAuth1RequestInParams(token: String, clientKey: String, signature: String, nonce: String, timestamp: String) = { val request = new MockRequest() request.queryString = oAuth1QueryString(token, clientKey, signature, nonce, timestamp, true) request } def oAuth2RequestInParams(token: String) : MockRequest = { val request = new MockRequest() request.queryString = "access_token=%s".format(token) request } def addParamsToRequestBody(request: MockRequest) = { if (request.queryString ne null) { request.body = request.queryString request.queryString = null } request.contentType = "application/x-www-form-urlencoded" request } def postRequest(request: MockRequest) = { if (request.queryString ne null) { request.body = request.queryString request.queryString = null } request.contentType = "application/x-www-form-urlencoded" request.method = "POST" request } def putRequest(request: MockRequest) = { postRequest(request) request.method = "PUT" request } def oAuth2nRequestInHeader(token: String) = 
requestWithAuthHeader(oAuth2Header(token)) }
twitter/joauth
src/test/scala/com/twitter/joauth/testhelpers/MockRequestFactory.scala
Scala
apache-2.0
3,876
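A usage sketch for the factory above, with made-up credentials; the exact spacing varies between runs because of getRandomWhitespace, so only the rough shape of the generated header is shown:

import com.twitter.joauth.testhelpers.MockRequestFactory

object MockRequestFactoryUsage {
  def main(args: Array[String]): Unit = {
    // all values below are invented for illustration
    val header = MockRequestFactory.oAuth1Header(
      token = "tok123",
      clientKey = "key456",
      signature = "sig+with/special=chars",
      nonce = "nonce789",
      timestamp = "1300000000",
      urlEncodeSig = true)
    // roughly: OAuth oauth_token="tok123", oauth_consumer_key="key456",
    //          oauth_signature="sig%2Bwith%2Fspecial%3Dchars", oauth_nonce="nonce789", ...
    println(header)

    println(MockRequestFactory.oAuth2Header("bearer-token-abc")) // Bearer bearer-token-abc
  }
}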
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.hibench.gearpumpbench.task import com.intel.hibench.common.streaming.metrics.KafkaReporter import com.intel.hibench.gearpumpbench.util.GearpumpConfig import org.apache.gearpump.Message import org.apache.gearpump.cluster.UserConfig import org.apache.gearpump.streaming.task.{Task, TaskContext} class Identity(taskContext: TaskContext, conf: UserConfig) extends Task(taskContext, conf) { private val benchConfig = conf.getValue[GearpumpConfig](GearpumpConfig.BENCH_CONFIG).get val reporter = new KafkaReporter(benchConfig.reporterTopic, benchConfig.brokerList) override def onNext(msg: Message): Unit = { taskContext.output(msg) reporter.report(msg.timestamp, System.currentTimeMillis()) } }
kimihe/Swallow
swallow-benchmark/HiBench-master/gearpumpbench/streaming/src/main/scala/com/intel/hibench/gearpumpbench/task/Identity.scala
Scala
apache-2.0
1,528
package mesosphere.marathon package core.health import akka.Done import mesosphere.marathon.core.instance.Instance import mesosphere.marathon.state.{ AppDefinition, PathId, Timestamp } import org.apache.mesos.Protos.TaskStatus import org.slf4j.LoggerFactory import scala.concurrent.Future import scala.collection.immutable.{ Map, Seq } trait HealthCheckManager { protected[this] val log = LoggerFactory.getLogger(getClass.getName) /** * Returns the active health checks for the app with the supplied id. */ def list(appId: PathId): Set[HealthCheck] /** * Adds a health check of the supplied app. */ def add(appDefinition: AppDefinition, healthCheck: HealthCheck, instances: Seq[Instance]): Unit /** * Adds all health checks for the supplied app. */ def addAllFor(app: AppDefinition, instances: Seq[Instance]): Unit /** * Removes a health check from the app with the supplied id. */ def remove(appId: PathId, version: Timestamp, healthCheck: HealthCheck): Unit /** * Removes all health checks. */ def removeAll(): Unit /** * Removes all health checks for the app with the supplied id. */ def removeAllFor(appId: PathId): Unit /** * Reconciles active health checks with those defined for all supplied apps. */ def reconcile(apps: Seq[AppDefinition]): Future[Done] /** * Notifies this health check manager of health information received * from Mesos. */ def update(taskStatus: TaskStatus, version: Timestamp): Unit /** * Returns the health status of the supplied instance. */ def status(appId: PathId, instanceId: Instance.Id): Future[Seq[Health]] /** * Returns the health status of all instances of the supplied app. */ def statuses(appId: PathId): Future[Map[Instance.Id, Seq[Health]]] }
guenter/marathon
src/main/scala/mesosphere/marathon/core/health/HealthCheckManager.scala
Scala
apache-2.0
1,827
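A minimal no-op stub of the trait above, the sort of thing a unit test might register when health checking is out of scope. It is a sketch written against the signatures shown here, not part of Marathon itself:

import akka.Done
import mesosphere.marathon.core.health.{ Health, HealthCheck, HealthCheckManager }
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.state.{ AppDefinition, PathId, Timestamp }
import org.apache.mesos.Protos.TaskStatus
import scala.collection.immutable.{ Map, Seq }
import scala.concurrent.Future

class NoopHealthCheckManager extends HealthCheckManager {
  override def list(appId: PathId): Set[HealthCheck] = Set.empty
  override def add(appDefinition: AppDefinition, healthCheck: HealthCheck, instances: Seq[Instance]): Unit = ()
  override def addAllFor(app: AppDefinition, instances: Seq[Instance]): Unit = ()
  override def remove(appId: PathId, version: Timestamp, healthCheck: HealthCheck): Unit = ()
  override def removeAll(): Unit = ()
  override def removeAllFor(appId: PathId): Unit = ()
  override def reconcile(apps: Seq[AppDefinition]): Future[Done] = Future.successful(Done)
  override def update(taskStatus: TaskStatus, version: Timestamp): Unit = ()
  override def status(appId: PathId, instanceId: Instance.Id): Future[Seq[Health]] = Future.successful(Seq.empty)
  override def statuses(appId: PathId): Future[Map[Instance.Id, Seq[Health]]] = Future.successful(Map.empty)
}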
package com.wavesplatform.transaction import cats.instances.either._ import cats.instances.lazyList._ import cats.syntax.traverse._ import com.wavesplatform.transaction.TxValidationError.InvalidSignature import monix.eval.Coeval trait Signed extends Authorized { protected val signatureValid: Coeval[Boolean] protected val signedDescendants: Coeval[Seq[Signed]] = Coeval(Nil) val signaturesValid: Coeval[Either[InvalidSignature, this.type]] = Coeval.evalOnce { (this +: signedDescendants()) .to(LazyList) .map( entity => if (entity.signatureValid()) { Right(entity) } else { Left(InvalidSignature(entity, None)) } ) .sequence .left .map { is => if (is.entity.eq(this)) is else InvalidSignature(this, Some(is)) } .map(_ => this) } }
wavesplatform/Waves
node/src/main/scala/com/wavesplatform/transaction/Signed.scala
Scala
mit
878
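The validation pattern above (check the entity itself plus its signed descendants lazily, and re-point a descendant's failure at the enclosing entity) can be seen in miniature without the Waves types. The following toy analogue is only an illustration of the pattern, not the Waves code; Node and Invalid are invented stand-ins:

object SignedValidationSketch {
  final case class Node(name: String, valid: Boolean, children: Seq[Node] = Nil)
  final case class Invalid(node: Node, cause: Option[Invalid])

  def validate(root: Node): Either[Invalid, Node] =
    ((root +: root.children).to(LazyList)
      .map(n => if (n.valid) Right(n) else Left(Invalid(n, None)))
      .collectFirst { case Left(err) => err }) match {
      case None                          => Right(root)
      case Some(err) if err.node eq root => Left(err)
      case Some(err)                     => Left(Invalid(root, Some(err))) // wrap the child failure
    }

  def main(args: Array[String]): Unit = {
    val child = Node("child", valid = false)
    val parent = Node("parent", valid = true, children = Seq(child))
    println(validate(parent)) // a Left: the child's Invalid wrapped inside a parent-level Invalid
  }
}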
package blackboard.monitor.connection abstract trait DataSource {} sealed case class PostgresqlDataSource(host: String, port: String, db: String, username: String, password: String) extends DataSource { override def equals(that: Any): Boolean = { if (that.isInstanceOf[PostgresqlDataSource]) { val dst = that.asInstanceOf[PostgresqlDataSource] host == dst.host && port == dst.port && db == dst.db } else { false } } override def hashCode: Int = { val key = "postgresql_" + host + "_" + port + "_" + db key.hashCode() } } sealed case class MongodbDataSource(uri: String) extends DataSource { override def equals(that: Any): Boolean = { if (that.isInstanceOf[MongodbDataSource]) { val dst = that.asInstanceOf[MongodbDataSource] uri == dst.uri } else { false } } override def hashCode: Int = { val key = "mongodb_" + uri key.hashCode() } } sealed case class OracleDataSource(host: String, port: String, username: String, password: String, sid: String) extends DataSource { override def equals(that: Any): Boolean = { if (that.isInstanceOf[OracleDataSource]) { val dst = that.asInstanceOf[OracleDataSource] host == dst.host && port == dst.port && sid == dst.sid && username == dst.username } else { false } } override def hashCode: Int = { val key = "oracle_" + host + "_" + port + "_" + sid + "_" + username key.hashCode() } } sealed case class RedisDataSource(host: String, port: String, password: String) extends DataSource { override def equals(that: Any): Boolean = { if (that.isInstanceOf[RedisDataSource]) { val dst = that.asInstanceOf[RedisDataSource] host == dst.host && port == dst.port } else { false } } override def hashCode: Int = { val key = "redis_" + host + "_" + port key.hashCode() } }
blackboard/monitor-bridge
src/main/scala/blackboard/monitor/connection/DataSources.scala
Scala
bsd-3-clause
1,900
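A short usage sketch of the equality convention above: because equals and hashCode deliberately ignore credentials, two configurations pointing at the same database collapse to one key, which is what makes these case classes usable as connection-pool cache keys (the host and credential values below are made up):

import blackboard.monitor.connection.{DataSource, PostgresqlDataSource}

object DataSourceEqualitySketch {
  def main(args: Array[String]): Unit = {
    // same host/port/db, different credentials
    val a = PostgresqlDataSource("db.example.com", "5432", "bbmon", "monitor", "secret1")
    val b = PostgresqlDataSource("db.example.com", "5432", "bbmon", "readonly", "secret2")
    println(a == b)                      // true: identity is host + port + db only
    println(a.hashCode == b.hashCode)    // true
    println(Set[DataSource](a, b).size)  // 1, so both resolve to a single pooled connection key
  }
}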
object Darts { def score(x: Double, y: Double): Int = { val distance = math.sqrt(x * x + y * y) distance match { case _ if distance <= 1.0 => 10 case _ if distance <= 5.0 => 5 case _ if distance <= 10.0 => 1 case _ => 0 } } }
exercism/xscala
exercises/practice/darts/.meta/Example.scala
Scala
mit
267
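Worked values for the scoring rule above; the score depends only on the distance sqrt(x*x + y*y) from the centre (<= 1 scores 10, <= 5 scores 5, <= 10 scores 1, anything further scores 0):

object DartsUsage {
  def main(args: Array[String]): Unit = {
    println(Darts.score(0.0, 0.0)) // distance 0.0   -> inner circle, 10
    println(Darts.score(3.0, 4.0)) // distance 5.0   -> middle circle, 5
    println(Darts.score(6.0, 8.0)) // distance 10.0  -> outer circle, 1
    println(Darts.score(9.0, 9.0)) // distance ~12.7 -> outside the board, 0
  }
}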
/* * Copyright 2012 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.twitter.zipkin.common import org.specs.SpecificationWithJUnit import org.specs.runner.JUnitSuiteRunner import org.junit.runner.RunWith import com.twitter.algebird.{Semigroup, Moments, Monoid} import com.twitter.util.Time import com.twitter.conversions.time._ @RunWith(classOf[JUnitSuiteRunner]) class DependenciesSpec extends SpecificationWithJUnit { "Services" should { "compare correctly" in { val s1 = Service("foo") val s2 = Service("bar") val s3 = Service("foo") val s4 = Service("Foo") val s5 = Service("FOO") s1 mustEqual s1 s1 mustEqual s3 s1 mustNotEq s2 s1 mustNotEq s4 // not sure if case sensitivity is required, but we should be aware if it changes s1 mustNotEq s5 } } "DependencyLinks" should { val m1 = Moments(2) val m2 = Moments(4) val d1 = DependencyLink(Service("tfe"), Service("mobileweb"), m1) val d2 = DependencyLink(Service("tfe"), Service("mobileweb"), m2) val d3 = DependencyLink(Service("Gizmoduck"), Service("tflock"), m2) "combine" in { Semigroup.plus(d1, d2) mustEqual d1.copy(durationMoments = Monoid.plus(m1, m2)) } "assert if incompatible links are combined" in { Semigroup.plus(d1, d3) must throwA[AssertionError] } } "Dependencies" should { val m1 = Moments(2) val m2 = Moments(4) val dl1 = DependencyLink(Service("tfe"), Service("mobileweb"), m1) val dl2 = DependencyLink(Service("tfe"), Service("mobileweb"), m2) val dl3 = DependencyLink(Service("Gizmoduck"), Service("tflock"), m2) val dl4 = DependencyLink(Service("mobileweb"), Service("Gizmoduck"), m2) val dl5 = dl1.copy(durationMoments = Monoid.plus(m1,m2)) val deps1 = Dependencies(Time.fromSeconds(0), Time.fromSeconds(0)+1.hour, List(dl1, dl3)) val deps2 = Dependencies(Time.fromSeconds(0)+1.hour, Time.fromSeconds(0)+2.hours, List(dl2, dl4)) "express identity when added to zero" in { val result = Monoid.plus(deps1, Monoid.zero[Dependencies]) result mustEqual deps1 } "combine" in { val result = Monoid.plus(deps1, deps2) result.startTime mustEqual Time.fromSeconds(0) result.endTime mustEqual Time.fromSeconds(0)+2.hours result.links must haveTheSameElementsAs(Seq(dl4, dl5, dl3)) } } }
suchang/zipkin
zipkin-common/src/test/scala/com/twitter/zipkin/common/DependenciesSpec.scala
Scala
apache-2.0
2,930
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package iht.forms.application.gifts import iht.FakeIhtApp import iht.forms.ApplicationForms._ import iht.forms.FormTestHelper import iht.models.application.gifts.{AllGifts, PreviousYearsGifts} class GiftFormsTest extends FormTestHelper with FakeIhtApp { "giftsGivenAwayForm" must { behave like yesNoQuestion[AllGifts]("isGivenAway", giftsGivenAwayForm, _.isGivenAway, "error.giftsGivenAway.select") } "giftWithReservationFromBenefitForm" must { behave like yesNoQuestion[AllGifts]("reservation.isReservation", giftWithReservationFromBenefitForm, _.isReservation, "error.giftWithReservationFromBenefit.select") } "giftSevenYearsGivenInLast7YearsForm" must { behave like yesNoQuestion[AllGifts]("givenInPast.isGivenInLast7Years", giftSevenYearsGivenInLast7YearsForm, _.isGivenInLast7Years, "error.giftSevenYearsGivenInLast7Years.select") } "giftSevenYearsToTrustForm" must { behave like yesNoQuestion[AllGifts]("trust.isToTrust", giftSevenYearsToTrustForm, _.isToTrust, "error.giftSevenYearsToTrust.select") } "previousYearsGiftsForm.value" must { behave like currencyValue[PreviousYearsGifts]( "value", previousYearsGiftsForm ) } "previousYearsGiftsForm.exemptions" must { behave like currencyValue[PreviousYearsGifts]( "exemptions", previousYearsGiftsForm ) } "previousYearsGiftsForm" must { "display error if value < exemptions" in { val expectedErrors = error("exemptions", "error.giftsDetails.exceedsGivenAway") val data = Map("yearId" -> "1", "value" -> "10000", "exemptions" -> "11000", "startDate" -> "", "endDate" -> "") checkForError(previousYearsGiftsForm, data, expectedErrors) } } }
hmrc/iht-frontend
test/iht/forms/application/gifts/GiftFormsTest.scala
Scala
apache-2.0
2,337
import scala.io.Source case class Point(val x: Int, val y: Int) { def reflection(midPoint: Point): Point = { val rx = 2 * midPoint.x - x val ry = 2 * midPoint.y - y Point(rx, ry) } } object FindPoint extends App { val coordinates = Source.stdin.getLines().drop(1) coordinates.foreach { lines => { val ints = lines.split(" ").map(_.toInt) val point = Point(ints(0), ints(1)) val midPoint = Point(ints(2), ints(3)) val symmetricPoint = point.reflection(midPoint) println(s"${symmetricPoint.x} ${symmetricPoint.y}") } } }
PaulNoth/hackerrank
practice/mathematics/fundamentals/find_point/FindPoint.scala
Scala
mit
581
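The reflection above is just the per-coordinate formula r = 2*m - p (the reflected point lies so that the given midpoint is exactly halfway). A couple of worked values:

object PointReflectionUsage {
  def main(args: Array[String]): Unit = {
    println(Point(0, 0).reflection(Point(1, 1))) // Point(2,2): rx = 2*1 - 0, ry = 2*1 - 0
    println(Point(4, 3).reflection(Point(2, 2))) // Point(0,1): rx = 2*2 - 4, ry = 2*2 - 3
  }
}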
/* * Copyright 2013 - 2017 Outworkers Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.outworkers.morpheus.builder /** * The hierarchical implementation of operators is designed to account for potential variations between SQL databases. * Every specific implementation can provide it's own set of operators and string encoding for them based on the specific semantics. * * A QueryBuilder singleton will exist for every database, and every QueryBuilder will select a specific set of operators. */ trait SQLOperatorSet { val eq = "=" val lt = "<" val lte = "<=" val gt = ">" val gte = ">=" val != = "!=" val <> = "<>" val like = "LIKE" val notLike = "NOT LIKE" val in = "IN" val notIn = "NOT IN" val <=> = "<=>" val ascii = "ASCII" val bin = "BIN" val bitLength = "BIT_LENGTH" val charLength = "CHAR_LENGTH" val characterLength = "CHARACTER_LENGTH" val concat = "CONCAT" val concatWs = "CONCAT_WS" val elt = "ELT" val exportSet = "EXPORT_SET" val field = "FIELD" val findInSet = "FIND_IN_SET" val format = "FORMAT" val fromBase64 = "FROM_BASE64" val hex = "HEX" val instr = "INSTR" val lcase = "LCASE" val left = "LEFT" val loadFile = "LOAD_FILE" val locate = "LOCATE" val lower = "LOWER" val lpad = "LPAD" val ltrim = "LTRIM" val makeSet = "MAKE_SET" val `match` = "MATCH" val mid = "MID" val notRegexp = "NOT REGEXP" val oct = "OCT" val octetLength = "OCTET_LENGTH" val ord = "ORD" val position = "POSITION" val quote = "QUOTE" val regexp = "REGEXP" val repeat = "REPEAT" val replace = "REPLACE" val reverse = "REVERSE" val right = "RIGHT" val rlike = "RLIKE" val rpad = "RPAD" val rtrim = "RTRIM" val soundex = "SOUNDEX" val soundsLike = "SOUNDS LIKE" val space = "SPACE" val strcmp = "STRCMP" val substr = "SUBSTR" val substringIndex = "SUBSTRING_INDEX" val substring = "SUBSTRING" val toBase64 = "TO_BASE64" val trim = "TRIM" val ucase = "UCASE" val unhex = "UNHEX" val upper = "UPPER" val weightString = "WEIGHT_STRING" } trait AbstractSQLKeys { val primaryKey = "PRIMARY KEY" val foreignKey = "FOREIGN KEY" val uniqueKey = "UNIQUE KEY" val index = "INDEX" val notNull = "NOT NULL" val autoIncrement = "AUTO_INCREMENT" val zeroFill = "ZEROFILL" val unsigned = "UNSIGNED" val cascade = "CASCADE" val restrict = "RESTRICT" val setNull = "SET NULL" val noAction = "NO ACTION" } abstract class AbstractSQLSyntax extends AbstractSQLKeys { val into = "INTO" val values = "VALUES" val select = "SELECT" val distinct = "DISTINCT" val ignore = "IGNORE" val quick = "QUICK" val create = "CREATE" val insert = "INSERT" val ifNotExists = "IF NOT EXISTS" val temporary = "TEMPORARY" val where = "WHERE" val having = "HAVING" val update = "UPDATE" val delete = "DELETE" val orderBy = "ORDER BY" val groupBy = "GROUP BY" val limit = "LIMIT" val and = "AND" val isNull = "IS NULL" val isNotNull = "IS NOT NULL" val or = "OR" val set = "SET" val from = "FROM" val table = "TABLE" val eqs = "=" val `(` = "(" val comma = "," val `)` = ")" val asc = "ASC" val desc = "DESC" val references = "REFERENCES" val onDelete = "ON DELETE" val onUpdate = "ON 
UPDATE" val between = "BETWEEN" val not = "NOT" val notBetween = "NOT BETWEEN" val exists = "EXISTS" val notExists = "NOT EXISTS" val interval = "INTERVAL" val greatest = "GREATEST" val coalesce = "COALESCE" val least = "LEAST" val on = "ON" val engine = "ENGINE" val leftJoin = "LEFT JOIN" val rightJoin = "RIGHT JOIN" val innerJoin = "INNER JOIN" val outerJoin = "OUTER JOIN" } abstract class AbstractSQLDataTypes { val tinyInt = "TINYINT" val smallInt = "SMALLINT" val mediumInt = "MEDIUMINT" val bigInt = "BIGINT" val int = "INT" val short = "SHORT" val decimal = "DECIMAL" val float = "FLOAT" val double = "DOUBLE" val long = "LONG" val char = "CHAR" val varchar = "VARCHAR" val tinyText = "TINYTEXT" val text = "TEXT" val mediumText = "MEDIUMTEXT" val longText = "LONGTEXT" val binary = "BINARY" val varbinary = "VARBINARY" val tinyBlob = "TINYBLOB" val blob = "BLOB" val mediumBlob = "MEDIUMBLOB" val longBlob = "LONGBLOB" val date = "DATE" val dateTime = "DATETIME" val time = "TIME" val timestamp = "TIMESTAMP" val year = "YEAR" val enum = "ENUM" val set = "SET" } object DefaultSQLOperatorSet extends SQLOperatorSet object DefaultSQLDataTypes extends AbstractSQLDataTypes object DefaultSQLSyntax extends AbstractSQLSyntax private[morpheus] object DefaultQueryBuilder extends AbstractQueryBuilder { val syntax = DefaultSQLSyntax val operators: SQLOperatorSet = DefaultSQLOperatorSet def clauses(clauses: List[SQLBuiltQuery], sep: String = " "): SQLBuiltQuery = { SQLBuiltQuery.empty.append(clauses.map(_.queryString).mkString(sep)) } def columns(list: List[SQLBuiltQuery]): SQLBuiltQuery = { list match { case head :: tail => SQLBuiltQuery.empty.wrapEscape(list.map(_.queryString)) case Nil => SQLBuiltQuery.empty } } def values(list: List[SQLBuiltQuery]): SQLBuiltQuery = { list match { case head :: tail => SQLBuiltQuery(DefaultSQLSyntax.values).wrapn(list.map(_.queryString)) case Nil => SQLBuiltQuery.empty } } } /** * This is used to represent a syntax block where multiple operations are possible at the same point in the code. * For instance, this is used to create a select block, where up to 10 operators can follow a select statement. */ private[morpheus] trait AbstractSyntaxBlock { def syntax: AbstractSQLSyntax } /** * The AbstractQueryBuilder is designed to define the basic behaviour of an SQL query builder. * A QueryBuilder singleton will exist for every database supported by Morpheus. * * Every specific table implementation will automatically select the appropriate QueryBuilder while the user doesn't have to do anything. * Every imports package will carefully swap out the table implementation with the relevant one, so the user doesn't have to bother doing anything crazy like * using different base table implementations for different databases. 
*/ private[morpheus] trait AbstractQueryBuilder { def operators: SQLOperatorSet def syntax: AbstractSQLSyntax def escapeValue(str: String): String = s"'$str'" def sqlEscape(str: String): String = s"`$str`" def eqs(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .forcePad.append(operators.eq) .forcePad.append(value) } def lt(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.lt).forcePad.append(value) } def lte(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.lte).forcePad.append(value) } def gt(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.gt).forcePad.append(value) } def gte(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.gte).forcePad.append(value) } def !=(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.`!=`).forcePad.append(value) } def <>(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(operators.`<>`).forcePad.append(value) } def <=>(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .forcePad.append(operators.`<=>`) .forcePad.append(value) } def like(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .pad.append(operators.like) .forcePad.append(value) } def notLike(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .pad.append(operators.notLike) .forcePad.append(value) } /** * This is the "SELECT WHERE column IN (5, 10, 5)" query builder block. * It allows for any traversable of strings to be passed as an argument for the values. * * This is done to enable a dual API, "table.select.where(_.column in List(5, 10, 5))". * and alternatively "table.select.where(_.column in (10, 5, 10))" * * @param name The name of the column to match inside the "IN" operator list. * @param values The collection of values to pass as an argument to the list of values. * @return An SQL query. */ def in[M[X] <: TraversableOnce[X]](name: String, values: M[String]): SQLBuiltQuery = { SQLBuiltQuery(name).pad.append(operators.in).wrap(values) } /** * This is the "SELECT WHERE column NOT IN (5, 10, 5)" query builder block. * It allows for any traversable of strings to be passed as an argument for the values. * * This is done to enable a dual API, "table.select.where(_.column notIn List(5, 10, 5))". * and alternatively "table.select.where(_.column notIn (10, 5, 10))" * * @param name The name of the column to match inside the "NOT IN" operator list. * @param values The collection of values to pass as an argument to the list of values. * @return An SQL query. 
*/ def notIn[M[X] <: TraversableOnce[X]](name: String, values: M[String]): SQLBuiltQuery = { SQLBuiltQuery(name).pad.append(operators.notIn).wrap(values) } def select(tableName: String): SQLBuiltQuery = { SQLBuiltQuery(syntax.select) .forcePad.append("*").forcePad .append(syntax.from) .forcePad.appendEscape(tableName) } def select(tableName: String, names: String*): SQLBuiltQuery = { SQLBuiltQuery(syntax.select) .pad.append(names, " ") .forcePad.append(syntax.from) .forcePad.appendEscape(tableName) } def select(tableName: String, clause: SQLBuiltQuery) = { SQLBuiltQuery(syntax.select) .pad.append(clause) .pad.append(syntax.from) .pad.appendEscape(tableName) } def where(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.where).forcePad.append(condition) } def orderBy(qb: SQLBuiltQuery, conditions: Seq[SQLBuiltQuery]): SQLBuiltQuery = { qb.pad .append(syntax.orderBy) .forcePad.append(conditions.map(_.queryString).mkString(", ")) } def groupBy(qb: SQLBuiltQuery, columns: Seq[String]): SQLBuiltQuery = { qb.pad .append(syntax.groupBy) .forcePad.append(columns.mkString(", ")) } def having(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.having).pad.append(condition) } def limit(qb: SQLBuiltQuery, value: String): SQLBuiltQuery = { qb.pad.append(syntax.limit) .forcePad.append(value) } def and(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.pad .append(syntax.and) .forcePad.append(condition) } def or(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.or).forcePad.append(condition) } def update(tableName: String): SQLBuiltQuery = { SQLBuiltQuery(syntax.update).forcePad } def setTo(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .pad.append(operators.eq) .forcePad.append(value) } def set(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.set) .forcePad.append(condition) } def andSet(qb: SQLBuiltQuery, condition: SQLBuiltQuery): SQLBuiltQuery = { qb.append(syntax.comma) .forcePad.append(condition) } def asc(name: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(syntax.asc) } def desc(name: String): SQLBuiltQuery = { SQLBuiltQuery(name).forcePad.append(syntax.desc) } def insert(qb: SQLBuiltQuery, columns: List[String], values: List[String]): SQLBuiltQuery = { qb.wrapEscape(columns) .forcePad.append(syntax.values) .wrap(values) } def leftJoin(qb: SQLBuiltQuery, tableName: String): SQLBuiltQuery = { qb.pad .append(syntax.leftJoin) .forcePad.appendEscape(tableName) } def rightJoin(qb: SQLBuiltQuery, tableName: String): SQLBuiltQuery = { qb.pad .append(syntax.rightJoin) .forcePad.appendEscape(tableName) } def innerJoin(qb: SQLBuiltQuery, tableName: String): SQLBuiltQuery = { qb.pad .append(syntax.innerJoin) .forcePad.appendEscape(tableName) } def outerJoin(qb: SQLBuiltQuery, tableName: String): SQLBuiltQuery = { qb.pad .append(syntax.outerJoin) .forcePad.appendEscape(tableName) } def ifNotExists(qb: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.ifNotExists) } def between(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .forcePad.append(syntax.between) .forcePad.append(value) } def notBetween(name: String, value: String): SQLBuiltQuery = { SQLBuiltQuery(name) .forcePad.append(syntax.notBetween) .forcePad.append(value) } def on(qb: SQLBuiltQuery, clause: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.on).forcePad.append(clause) } def exists(select: SQLBuiltQuery): SQLBuiltQuery = { 
SQLBuiltQuery(syntax.exists).pad.wrap(select) } def notExists(select: SQLBuiltQuery): SQLBuiltQuery = { SQLBuiltQuery(syntax.notExists).wrap(select) } /** * This will create a "SELECT INTERVAL (x1, x2, ..)" query. * When this method is invoked, the arguments would've been already serialised and escaped. * @param values The list of parameters to which to apply the "INTERVAL" operator to. * @return An SQL query. */ def interval(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(syntax.interval).wrap(values) } /** * This will create a "SELECT LEAST (x1, x2, ..)" query. * When this method is invoked, the arguments would've been already serialised and escaped. * @param values The list of parameters to which to apply the "LEAST" operator to. * @return An SQL query. */ def least(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(syntax.least).wrap(values) } /** * This will create a "SELECT GREATEST (x1, x2, ..)" query. * When this method is invoked, the arguments would've been already serialised and escaped. * @param values The list of parameters to which to apply the "GREATEST" operator to. * @return An SQL query. */ def greatest(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(syntax.greatest).wrap(values) } /** * This will create a "SELECT COALESCE (null, null, ..)" query. * When this method is invoked, the arguments would've been already serialised and escaped. * @param values The list of parameters to which to apply the "COALESCE" operator to. * @return An SQL query. */ def coalesce(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(syntax.coalesce).wrap(values) } def ascii(value: String): SQLBuiltQuery = { SQLBuiltQuery(operators.ascii).wrap(value) } def bitLength(value: String): SQLBuiltQuery = { SQLBuiltQuery(operators.bitLength).wrap(value) } def charLength(value: String): SQLBuiltQuery = { SQLBuiltQuery(operators.charLength).wrap(value) } def characterLength(value: String): SQLBuiltQuery = { SQLBuiltQuery(operators.characterLength).wrap(value) } def concat(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(operators.concat).wrap(values) } def concatWs(values: List[String]): SQLBuiltQuery = { SQLBuiltQuery(operators.concatWs).wrap(values) } def bin(value: String): SQLBuiltQuery = { SQLBuiltQuery(operators.bin).wrap(value) } def engine(qb: SQLBuiltQuery, value: String): SQLBuiltQuery = { qb.pad.append(syntax.engine).forcePad.append(value) } def isNull(qb: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.isNull) } def isNotNull(qb: SQLBuiltQuery): SQLBuiltQuery = { qb.pad.append(syntax.isNotNull) } }
websudos/morpheus
morpheus-dsl/src/main/scala/com/outworkers/morpheus/builder/QueryBuilder.scala
Scala
bsd-2-clause
16,474
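A composition sketch for the builder above. DefaultQueryBuilder is private[morpheus], so this hypothetical object assumes it lives inside the com.outworkers.morpheus.builder package; the exact spacing and escaping come from SQLBuiltQuery, so the expected output in the comment is approximate:

package com.outworkers.morpheus.builder

object QueryBuilderSketch {
  def main(args: Array[String]): Unit = {
    val qb = DefaultQueryBuilder
    val base = qb.select("people", "name", "age")
    val filtered = qb.where(base, qb.eqs("age", "21"))
    val limited = qb.limit(qb.orderBy(filtered, Seq(qb.desc("age"))), "10")
    // approximately: SELECT name age FROM `people` WHERE age = 21 ORDER BY age DESC LIMIT 10
    println(limited.queryString)
  }
}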
package cromwell.engine.backend.io.filesystem.gcs import java.lang.Iterable import java.nio.file._ import java.nio.file.attribute.UserPrincipalLookupService import java.nio.file.spi.FileSystemProvider import java.util.{Collections, Set => JSet} import scala.language.postfixOps case class NotAGcsPathException(path: String) extends IllegalArgumentException(s"$path is not a valid GCS path.") object GcsFileSystem { val Separator = "/" private[io] val Protocol = "gs://" private val GsUriRegex = s"""$Protocol(.*)""".r private val AttributeViews = Collections.singleton("basic") val defaultGcsFileSystem = GcsFileSystemProvider.defaultProvider.getFileSystem def isAbsoluteGcsPath(str: String) = str match { case GsUriRegex(chunks) => true case _ => false } def apply(provider: GcsFileSystemProvider) = new GcsFileSystem(provider) } /** * Implements the java.nio.FileSystem interface for GoogleCloudStorage. */ class GcsFileSystem private(gcsFileSystemProvider: GcsFileSystemProvider) extends FileSystem { import GcsFileSystem._ override def supportedFileAttributeViews(): JSet[String] = AttributeViews override def getSeparator: String = Separator override def getRootDirectories: Iterable[Path] = Collections.emptyList[Path] override def newWatchService(): WatchService = throw new NotImplementedError("GCS FS does not support Watch Service at this time") override def getFileStores: Iterable[FileStore] = Collections.emptyList() override def isReadOnly: Boolean = false override def provider(): FileSystemProvider = gcsFileSystemProvider override def isOpen: Boolean = true override def close(): Unit = throw new UnsupportedOperationException("GCS FS cannot be closed") override def getPathMatcher(syntaxAndPattern: String): PathMatcher = FileSystems.getDefault.getPathMatcher(syntaxAndPattern) override def getUserPrincipalLookupService: UserPrincipalLookupService = throw new UnsupportedOperationException() private def buildPath(first: String, more: Seq[String], forceDirectory: Boolean) = { val directory = forceDirectory || (more.isEmpty && first.endsWith(Separator)) || more.lastOption.exists(_.endsWith(Separator)) first match { case GsUriRegex(chunks) => new NioGcsPath(chunks.split(Separator) ++ more.toArray[String], true, directory)(this) case empty if empty.isEmpty => new NioGcsPath(Array.empty[String] ++ more.toArray[String], false, false)(this) case _ => throw new NotAGcsPathException(s"$first is not a gcs path") } } override def getPath(first: String, more: String*): Path = buildPath(first, more, forceDirectory = false) def getPathAsDirectory(first: String, more: String*): Path = buildPath(first, more, forceDirectory = true) }
cowmoo/cromwell
engine/src/main/scala/cromwell/engine/backend/io/filesystem/gcs/GcsFileSystem.scala
Scala
bsd-3-clause
2,764
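A small sketch of the gs:// recognition above; isAbsoluteGcsPath is a pure regex check, so it needs no Google credentials or provider setup (the bucket and paths below are made up):

import cromwell.engine.backend.io.filesystem.gcs.GcsFileSystem

object GcsPathSketch {
  def main(args: Array[String]): Unit = {
    println(GcsFileSystem.isAbsoluteGcsPath("gs://my-bucket/workflow/outputs.txt")) // true
    println(GcsFileSystem.isAbsoluteGcsPath("/local/tmp/outputs.txt"))              // false
    // on a concrete file system instance, getPath("gs://my-bucket/a/b.txt") splits on "/",
    // while a non-empty path without the gs:// prefix raises NotAGcsPathException
  }
}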
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.text.SimpleDateFormat import java.util.{Date, Locale, Properties, UUID} import scala.collection.JavaConverters._ import org.apache.spark.annotation.InterfaceStability import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, UnresolvedRelation} import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, LogicalPlan} import org.apache.spark.sql.execution.SQLExecution import org.apache.spark.sql.execution.command.DDLUtils import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, LogicalRelation} import org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2 import org.apache.spark.sql.sources.BaseRelation import org.apache.spark.sql.sources.v2.{DataSourceV2, DataSourceV2Options, WriteSupport} import org.apache.spark.sql.types.StructType /** * Interface used to write a [[Dataset]] to external storage systems (e.g. file systems, * key-value stores, etc). Use `Dataset.write` to access this. * * @since 1.4.0 */ @InterfaceStability.Stable final class DataFrameWriter[T] private[sql](ds: Dataset[T]) { private val df = ds.toDF() /** * Specifies the behavior when data or table already exists. Options include: * - `SaveMode.Overwrite`: overwrite the existing data. * - `SaveMode.Append`: append the data. * - `SaveMode.Ignore`: ignore the operation (i.e. no-op). * - `SaveMode.ErrorIfExists`: default option, throw an exception at runtime. * * @since 1.4.0 */ def mode(saveMode: SaveMode): DataFrameWriter[T] = { this.mode = saveMode this } /** * Specifies the behavior when data or table already exists. Options include: * - `overwrite`: overwrite the existing data. * - `append`: append the data. * - `ignore`: ignore the operation (i.e. no-op). * - `error` or `errorifexists`: default option, throw an exception at runtime. * * @since 1.4.0 */ def mode(saveMode: String): DataFrameWriter[T] = { this.mode = saveMode.toLowerCase(Locale.ROOT) match { case "overwrite" => SaveMode.Overwrite case "append" => SaveMode.Append case "ignore" => SaveMode.Ignore case "error" | "errorifexists" | "default" => SaveMode.ErrorIfExists case _ => throw new IllegalArgumentException(s"Unknown save mode: $saveMode. " + "Accepted save modes are 'overwrite', 'append', 'ignore', 'error', 'errorifexists'.") } this } /** * Specifies the underlying output data source. Built-in options include "parquet", "json", etc. * * @since 1.4.0 */ def format(source: String): DataFrameWriter[T] = { this.source = source this } /** * Adds an output option for the underlying data source. 
* * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def option(key: String, value: String): DataFrameWriter[T] = { this.extraOptions += (key -> value) this } /** * Adds an output option for the underlying data source. * * @since 2.0.0 */ def option(key: String, value: Boolean): DataFrameWriter[T] = option(key, value.toString) /** * Adds an output option for the underlying data source. * * @since 2.0.0 */ def option(key: String, value: Long): DataFrameWriter[T] = option(key, value.toString) /** * Adds an output option for the underlying data source. * * @since 2.0.0 */ def option(key: String, value: Double): DataFrameWriter[T] = option(key, value.toString) /** * (Scala-specific) Adds output options for the underlying data source. * * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def options(options: scala.collection.Map[String, String]): DataFrameWriter[T] = { this.extraOptions ++= options this } /** * Adds output options for the underlying data source. * * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def options(options: java.util.Map[String, String]): DataFrameWriter[T] = { this.options(options.asScala) this } /** * Partitions the output by the given columns on the file system. If specified, the output is * laid out on the file system similar to Hive's partitioning scheme. As an example, when we * partition a dataset by year and then month, the directory layout would look like: * * - year=2016/month=01/ * - year=2016/month=02/ * * Partitioning is one of the most widely used techniques to optimize physical data layout. * It provides a coarse-grained index for skipping unnecessary data reads when queries have * predicates on the partitioned columns. In order for partitioning to work well, the number * of distinct values in each column should typically be less than tens of thousands. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) staring Spark 2.1.0. * * @since 1.4.0 */ @scala.annotation.varargs def partitionBy(colNames: String*): DataFrameWriter[T] = { this.partitioningColumns = Option(colNames) this } /** * Buckets the output by the given columns. If specified, the output is laid out on the file * system similar to Hive's bucketing scheme. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) staring Spark 2.1.0. * * @since 2.0 */ @scala.annotation.varargs def bucketBy(numBuckets: Int, colName: String, colNames: String*): DataFrameWriter[T] = { this.numBuckets = Option(numBuckets) this.bucketColumnNames = Option(colName +: colNames) this } /** * Sorts the output in each bucket by the given columns. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) staring Spark 2.1.0. * * @since 2.0 */ @scala.annotation.varargs def sortBy(colName: String, colNames: String*): DataFrameWriter[T] = { this.sortColumnNames = Option(colName +: colNames) this } /** * Saves the content of the `DataFrame` at the specified path. 
* * @since 1.4.0 */ def save(path: String): Unit = { this.extraOptions += ("path" -> path) save() } /** * Saves the content of the `DataFrame` as the specified table. * * @since 1.4.0 */ def save(): Unit = { if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) { throw new AnalysisException("Hive data source can only be used with tables, you can not " + "write files of Hive data source directly.") } assertNotBucketed("save") val cls = DataSource.lookupDataSource(source, df.sparkSession.sessionState.conf) if (classOf[DataSourceV2].isAssignableFrom(cls)) { cls.newInstance() match { case ds: WriteSupport => val options = new DataSourceV2Options(extraOptions.asJava) // Using a timestamp and a random UUID to distinguish different writing jobs. This is good // enough as there won't be tons of writing jobs created at the same second. val jobId = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US) .format(new Date()) + "-" + UUID.randomUUID() val writer = ds.createWriter(jobId, df.logicalPlan.schema, mode, options) if (writer.isPresent) { runCommand(df.sparkSession, "save") { WriteToDataSourceV2(writer.get(), df.logicalPlan) } } case _ => throw new AnalysisException(s"$cls does not support data writing.") } } else { // Code path for data source v1. runCommand(df.sparkSession, "save") { DataSource( sparkSession = df.sparkSession, className = source, partitionColumns = partitioningColumns.getOrElse(Nil), options = extraOptions.toMap).planForWriting(mode, df.logicalPlan) } } } /** * Inserts the content of the `DataFrame` to the specified table. It requires that * the schema of the `DataFrame` is the same as the schema of the table. * * @note Unlike `saveAsTable`, `insertInto` ignores the column names and just uses position-based * resolution. For example: * * {{{ * scala> Seq((1, 2)).toDF("i", "j").write.mode("overwrite").saveAsTable("t1") * scala> Seq((3, 4)).toDF("j", "i").write.insertInto("t1") * scala> Seq((5, 6)).toDF("a", "b").write.insertInto("t1") * scala> sql("select * from t1").show * +---+---+ * | i| j| * +---+---+ * | 5| 6| * | 3| 4| * | 1| 2| * +---+---+ * }}} * * Because it inserts data to an existing table, format or options will be ignored. * * @since 1.4.0 */ def insertInto(tableName: String): Unit = { insertInto(df.sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)) } private def insertInto(tableIdent: TableIdentifier): Unit = { assertNotBucketed("insertInto") if (partitioningColumns.isDefined) { throw new AnalysisException( "insertInto() can't be used together with partitionBy(). " + "Partition columns have already be defined for the table. " + "It is not necessary to use partitionBy()." 
) } runCommand(df.sparkSession, "insertInto") { InsertIntoTable( table = UnresolvedRelation(tableIdent), partition = Map.empty[String, Option[String]], query = df.logicalPlan, overwrite = mode == SaveMode.Overwrite, ifPartitionNotExists = false) } } private def getBucketSpec: Option[BucketSpec] = { if (sortColumnNames.isDefined) { require(numBuckets.isDefined, "sortBy must be used together with bucketBy") } numBuckets.map { n => BucketSpec(n, bucketColumnNames.get, sortColumnNames.getOrElse(Nil)) } } private def assertNotBucketed(operation: String): Unit = { if (numBuckets.isDefined || sortColumnNames.isDefined) { throw new AnalysisException(s"'$operation' does not support bucketing right now") } } private def assertNotPartitioned(operation: String): Unit = { if (partitioningColumns.isDefined) { throw new AnalysisException( s"'$operation' does not support partitioning") } } /** * Saves the content of the `DataFrame` as the specified table. * * In the case the table already exists, behavior of this function depends on the * save mode, specified by the `mode` function (default to throwing an exception). * When `mode` is `Overwrite`, the schema of the `DataFrame` does not need to be * the same as that of the existing table. * * When `mode` is `Append`, if there is an existing table, we will use the format and options of * the existing table. The column order in the schema of the `DataFrame` doesn't need to be same * as that of the existing table. Unlike `insertInto`, `saveAsTable` will use the column names to * find the correct column positions. For example: * * {{{ * scala> Seq((1, 2)).toDF("i", "j").write.mode("overwrite").saveAsTable("t1") * scala> Seq((3, 4)).toDF("j", "i").write.mode("append").saveAsTable("t1") * scala> sql("select * from t1").show * +---+---+ * | i| j| * +---+---+ * | 1| 2| * | 4| 3| * +---+---+ * }}} * * In this method, save mode is used to determine the behavior if the data source table exists in * Spark catalog. We will always overwrite the underlying data of data source (e.g. a table in * JDBC data source) if the table doesn't exist in Spark catalog, and will always append to the * underlying data of data source if the table already exists. * * When the DataFrame is created from a non-partitioned `HadoopFsRelation` with a single input * path, and the data source provider can be mapped to an existing Hive builtin SerDe (i.e. ORC * and Parquet), the table is persisted in a Hive compatible format, which means other systems * like Hive will be able to read this table. Otherwise, the table is persisted in a Spark SQL * specific format. * * @since 1.4.0 */ def saveAsTable(tableName: String): Unit = { saveAsTable(df.sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)) } private def saveAsTable(tableIdent: TableIdentifier): Unit = { val catalog = df.sparkSession.sessionState.catalog val tableExists = catalog.tableExists(tableIdent) val db = tableIdent.database.getOrElse(catalog.getCurrentDatabase) val tableIdentWithDB = tableIdent.copy(database = Some(db)) val tableName = tableIdentWithDB.unquotedString (tableExists, mode) match { case (true, SaveMode.Ignore) => // Do nothing case (true, SaveMode.ErrorIfExists) => throw new AnalysisException(s"Table $tableIdent already exists.") case (true, SaveMode.Overwrite) => // Get all input data source or hive relations of the query. 
        val srcRelations = df.logicalPlan.collect {
          case LogicalRelation(src: BaseRelation, _, _, _) => src
          case relation: HiveTableRelation => relation.tableMeta.identifier
        }

        val tableRelation = df.sparkSession.table(tableIdentWithDB).queryExecution.analyzed
        EliminateSubqueryAliases(tableRelation) match {
          // check if the table is a data source table (the relation is a BaseRelation).
          case LogicalRelation(dest: BaseRelation, _, _, _) if srcRelations.contains(dest) =>
            throw new AnalysisException(
              s"Cannot overwrite table $tableName that is also being read from")
          // check hive table relation when overwrite mode
          case relation: HiveTableRelation
            if srcRelations.contains(relation.tableMeta.identifier) =>
            throw new AnalysisException(
              s"Cannot overwrite table $tableName that is also being read from")
          case _ => // OK
        }

        // Drop the existing table
        catalog.dropTable(tableIdentWithDB, ignoreIfNotExists = true, purge = false)
        createTable(tableIdentWithDB)
        // Refresh the cache of the table in the catalog.
        catalog.refreshTable(tableIdentWithDB)

      case _ => createTable(tableIdent)
    }
  }

  private def createTable(tableIdent: TableIdentifier): Unit = {
    val storage = DataSource.buildStorageFormatFromOptions(extraOptions.toMap)
    val tableType = if (storage.locationUri.isDefined) {
      CatalogTableType.EXTERNAL
    } else {
      CatalogTableType.MANAGED
    }

    val tableDesc = CatalogTable(
      identifier = tableIdent,
      tableType = tableType,
      storage = storage,
      schema = new StructType,
      provider = Some(source),
      partitionColumnNames = partitioningColumns.getOrElse(Nil),
      bucketSpec = getBucketSpec)

    runCommand(df.sparkSession, "saveAsTable")(CreateTable(tableDesc, mode, Some(df.logicalPlan)))
  }

  /**
   * Saves the content of the `DataFrame` to an external database table via JDBC. In the case the
   * table already exists in the external database, behavior of this function depends on the
   * save mode, specified by the `mode` function (default to throwing an exception).
   *
   * Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash
   * your external database systems.
   *
   * You can set the following JDBC-specific option(s) for storing JDBC:
   * <ul>
   * <li>`truncate` (default `false`): use `TRUNCATE TABLE` instead of `DROP TABLE`.</li>
   * </ul>
   *
   * In case of failures, users should turn off `truncate` option to use `DROP TABLE` again. Also,
   * due to the different behavior of `TRUNCATE TABLE` among DBMSs, it's not always safe to use
   * this. MySQLDialect, DB2Dialect, MsSqlServerDialect, DerbyDialect, and OracleDialect support
   * this, while PostgresDialect and the default JDBCDialect don't. For unknown and unsupported
   * JDBCDialects, the user option `truncate` is ignored.
   *
   * @param url JDBC database url of the form `jdbc:subprotocol:subname`
   * @param table Name of the table in the external database.
   * @param connectionProperties JDBC database connection arguments, a list of arbitrary string
   *                             tag/value. Normally at least a "user" and "password" property
   *                             should be included. "batchsize" can be used to control the
   *                             number of rows per insert. "isolationLevel" can be one of
   *                             "NONE", "READ_COMMITTED", "READ_UNCOMMITTED", "REPEATABLE_READ",
   *                             or "SERIALIZABLE", corresponding to standard transaction
   *                             isolation levels defined by JDBC's Connection object, with default
   *                             of "READ_UNCOMMITTED".
   * @since 1.4.0
   */
  def jdbc(url: String, table: String, connectionProperties: Properties): Unit = {
    assertNotPartitioned("jdbc")
    assertNotBucketed("jdbc")
    // connectionProperties should override settings in extraOptions.
this.extraOptions ++= connectionProperties.asScala // explicit url and dbtable should override all this.extraOptions += ("url" -> url, "dbtable" -> table) format("jdbc").save() } /** * Saves the content of the `DataFrame` in JSON format (<a href="http://jsonlines.org/"> * JSON Lines text format or newline-delimited JSON</a>) at the specified path. * This is equivalent to: * {{{ * format("json").save(path) * }}} * * You can set the following JSON-specific option(s) for writing JSON files: * <ul> * <li>`compression` (default `null`): compression codec to use when saving to file. This can be * one of the known case-insensitive shorten names (`none`, `bzip2`, `gzip`, `lz4`, * `snappy` and `deflate`). </li> * <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format. * Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to * date type.</li> * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that * indicates a timestamp format. Custom date formats follow the formats at * `java.text.SimpleDateFormat`. This applies to timestamp type.</li> * </ul> * * @since 1.4.0 */ def json(path: String): Unit = { format("json").save(path) } /** * Saves the content of the `DataFrame` in Parquet format at the specified path. * This is equivalent to: * {{{ * format("parquet").save(path) * }}} * * You can set the following Parquet-specific option(s) for writing Parquet files: * <ul> * <li>`compression` (default is the value specified in `spark.sql.parquet.compression.codec`): * compression codec to use when saving to file. This can be one of the known case-insensitive * shorten names(`none`, `snappy`, `gzip`, and `lzo`). This will override * `spark.sql.parquet.compression.codec`.</li> * </ul> * * @since 1.4.0 */ def parquet(path: String): Unit = { format("parquet").save(path) } /** * Saves the content of the `DataFrame` in ORC format at the specified path. * This is equivalent to: * {{{ * format("orc").save(path) * }}} * * You can set the following ORC-specific option(s) for writing ORC files: * <ul> * <li>`compression` (default is the value specified in `spark.sql.orc.compression.codec`): * compression codec to use when saving to file. This can be one of the known case-insensitive * shorten names(`none`, `snappy`, `zlib`, and `lzo`). This will override * `orc.compress` and `spark.sql.orc.compression.codec`. If `orc.compress` is given, * it overrides `spark.sql.orc.compression.codec`.</li> * </ul> * * @since 1.5.0 * @note Currently, this method can only be used after enabling Hive support */ def orc(path: String): Unit = { format("orc").save(path) } /** * Saves the content of the `DataFrame` in a text file at the specified path. * The DataFrame must have only one column that is of string type. * Each row becomes a new line in the output file. For example: * {{{ * // Scala: * df.write.text("/path/to/output") * * // Java: * df.write().text("/path/to/output") * }}} * * You can set the following option(s) for writing text files: * <ul> * <li>`compression` (default `null`): compression codec to use when saving to file. This can be * one of the known case-insensitive shorten names (`none`, `bzip2`, `gzip`, `lz4`, * `snappy` and `deflate`). </li> * </ul> * * @since 1.6.0 */ def text(path: String): Unit = { format("text").save(path) } /** * Saves the content of the `DataFrame` in CSV format at the specified path. 
 * This is equivalent to:
   * {{{
   *   format("csv").save(path)
   * }}}
   *
   * You can set the following CSV-specific option(s) for writing CSV files:
   * <ul>
   * <li>`sep` (default `,`): sets the single character as a separator for each
   * field and value.</li>
   * <li>`quote` (default `"`): sets the single character used for escaping quoted values where
   * the separator can be part of the value. If an empty string is set, it uses `u0000`
   * (null character).</li>
   * <li>`escape` (default `\\`): sets the single character used for escaping quotes inside
   * an already quoted value.</li>
   * <li>`escapeQuotes` (default `true`): a flag indicating whether values containing
   * quotes should always be enclosed in quotes. Default is to escape all values containing
   * a quote character.</li>
   * <li>`quoteAll` (default `false`): a flag indicating whether all values should always be
   * enclosed in quotes. Default is to only escape values containing a quote character.</li>
   * <li>`header` (default `false`): writes the names of columns as the first line.</li>
   * <li>`nullValue` (default empty string): sets the string representation of a null value.</li>
   * <li>`compression` (default `null`): compression codec to use when saving to file. This can be
   * one of the known case-insensitive shortened names (`none`, `bzip2`, `gzip`, `lz4`,
   * `snappy` and `deflate`). </li>
   * <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format.
   * Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to
   * date type.</li>
   * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
   * indicates a timestamp format. Custom date formats follow the formats at
   * `java.text.SimpleDateFormat`. This applies to timestamp type.</li>
   * <li>`ignoreLeadingWhiteSpace` (default `true`): a flag indicating whether or not leading
   * whitespaces from values being written should be skipped.</li>
   * <li>`ignoreTrailingWhiteSpace` (default `true`): a flag indicating whether or not
   * trailing whitespaces from values being written should be skipped.</li>
   * </ul>
   *
   * @since 2.0.0
   */
  def csv(path: String): Unit = {
    format("csv").save(path)
  }

  /**
   * Wrap a DataFrameWriter action to track the QueryExecution and time cost, then report to the
   * user-registered callback functions.
   */
  private def runCommand(session: SparkSession, name: String)(command: LogicalPlan): Unit = {
    val qe = session.sessionState.executePlan(command)
    try {
      val start = System.nanoTime()
      // call `QueryExecution.toRdd` to trigger the execution of commands.
      SQLExecution.withNewExecutionId(session, qe)(qe.toRdd)
      val end = System.nanoTime()
      session.listenerManager.onSuccess(name, qe, end - start)
    } catch {
      case e: Exception =>
        session.listenerManager.onFailure(name, qe, e)
        throw e
    }
  }

  ///////////////////////////////////////////////////////////////////////////////////////
  // Builder pattern config options
  ///////////////////////////////////////////////////////////////////////////////////////

  private var source: String = df.sparkSession.sessionState.conf.defaultDataSourceName

  private var mode: SaveMode = SaveMode.ErrorIfExists

  private val extraOptions = new scala.collection.mutable.HashMap[String, String]

  private var partitioningColumns: Option[Seq[String]] = None

  private var bucketColumnNames: Option[Seq[String]] = None

  private var numBuckets: Option[Int] = None

  private var sortColumnNames: Option[Seq[String]] = None
}
ron8hu/spark
sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
Scala
apache-2.0
25,941
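A brief usage sketch of the DataFrameWriter API documented in the file above (not part of the original source); the session, DataFrame, paths and table names are illustrative assumptions:

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

// Hypothetical session and data, only for illustration.
val spark = SparkSession.builder().appName("writer-sketch").getOrCreate()
import spark.implicits._
val df: DataFrame = Seq((2016, 1, 42L), (2016, 2, 7L)).toDF("year", "month", "userId")

// File-based write: partitioned layout (year=2016/month=1/...), Parquet format.
df.write
  .format("parquet")
  .mode(SaveMode.Overwrite)
  .partitionBy("year", "month")
  .option("compression", "snappy")
  .save("/tmp/events") // path is illustrative

// Table write: bucketBy/sortBy only work together with saveAsTable, not with save().
df.write
  .mode(SaveMode.Overwrite)
  .bucketBy(8, "userId")
  .sortBy("userId")
  .saveAsTable("events_bucketed")

// insertInto resolves columns by position rather than by name (see the Scaladoc above),
// so the projection order must match the target table's column order.
df.write.mode(SaveMode.Overwrite).saveAsTable("events_plain")
df.select("year", "month", "userId").write.insertInto("events_plain")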
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicLong import org.apache.spark.SparkContext import org.apache.spark.sql.SparkSession import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart} object SQLExecution { val EXECUTION_ID_KEY = "spark.sql.execution.id" private val _nextExecutionId = new AtomicLong(0) private def nextExecutionId: Long = _nextExecutionId.getAndIncrement private val executionIdToQueryExecution = new ConcurrentHashMap[Long, QueryExecution]() def getQueryExecution(executionId: Long): QueryExecution = { executionIdToQueryExecution.get(executionId) } private val testing = sys.props.contains("spark.testing") private[sql] def checkSQLExecutionId(sparkSession: SparkSession): Unit = { // only throw an exception during tests. a missing execution ID should not fail a job. if (testing && sparkSession.sparkContext.getLocalProperty(EXECUTION_ID_KEY) == null) { // Attention testers: when a test fails with this exception, it means that the action that // started execution of a query didn't call withNewExecutionId. The execution ID should be // set by calling withNewExecutionId in the action that begins execution, like // Dataset.collect or DataFrameWriter.insertInto. throw new IllegalStateException("Execution ID should be set") } } /** * Wrap an action that will execute "queryExecution" to track all Spark jobs in the body so that * we can connect them with an execution. 
*/ def withNewExecutionId[T]( sparkSession: SparkSession, queryExecution: QueryExecution)(body: => T): T = { val sc = sparkSession.sparkContext val oldExecutionId = sc.getLocalProperty(EXECUTION_ID_KEY) if (oldExecutionId == null) { val executionId = SQLExecution.nextExecutionId sc.setLocalProperty(EXECUTION_ID_KEY, executionId.toString) executionIdToQueryExecution.put(executionId, queryExecution) val r = try { // sparkContext.getCallSite() would first try to pick up any call site that was previously // set, then fall back to Utils.getCallSite(); call Utils.getCallSite() directly on // streaming queries would give us call site like "run at <unknown>:0" val callSite = sparkSession.sparkContext.getCallSite() sparkSession.sparkContext.listenerBus.post(SparkListenerSQLExecutionStart( executionId, callSite.shortForm, callSite.longForm, queryExecution.toString, SparkPlanInfo.fromSparkPlan(queryExecution.executedPlan), System.currentTimeMillis())) try { body } finally { sparkSession.sparkContext.listenerBus.post(SparkListenerSQLExecutionEnd( executionId, System.currentTimeMillis())) } } finally { executionIdToQueryExecution.remove(executionId) sc.setLocalProperty(EXECUTION_ID_KEY, null) } r } else { // Don't support nested `withNewExecutionId`. This is an example of the nested // `withNewExecutionId`: // // class DataFrame { // def foo: T = withNewExecutionId { something.createNewDataFrame().collect() } // } // // Note: `collect` will call withNewExecutionId // In this case, only the "executedPlan" for "collect" will be executed. The "executedPlan" // for the outer DataFrame won't be executed. So it's meaningless to create a new Execution // for the outer DataFrame. Even if we track it, since its "executedPlan" doesn't run, // all accumulator metrics will be 0. It will confuse people if we show them in Web UI. // // A real case is the `DataFrame.count` method. throw new IllegalArgumentException(s"$EXECUTION_ID_KEY is already set") } } /** * Wrap an action with a known executionId. When running a different action in a different * thread from the original one, this method can be used to connect the Spark jobs in this action * with the known executionId, e.g., `BroadcastHashJoin.broadcastFuture`. */ def withExecutionId[T](sc: SparkContext, executionId: String)(body: => T): T = { val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY) try { sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId) body } finally { sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId) } } }
bOOm-X/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala
Scala
apache-2.0
5,310
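A hedged sketch of how the helper above is typically called by an action, mirroring the pattern used by DataFrameWriter.runCommand earlier in this section; the `session` and `qe` values are assumed to be supplied by the caller:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}

// All Spark jobs triggered inside the body share one execution id, so the SQL UI
// can group them under a single query execution.
def executeTracked(session: SparkSession, qe: QueryExecution): Unit = {
  SQLExecution.withNewExecutionId(session, qe) {
    // Materialising the RDD triggers execution of the planned command.
    qe.toRdd
  }
}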
package com.featurefm.riversong.health import com.featurefm.riversong.health.HealthState.HealthState import org.joda.time.DateTime import HealthState._ case class InternalContainerHealth(applicationName: String, time: DateTime, state: HealthState, details: String, checks: Seq[NamedHealthInfo]) {} object ContainerHealth { def apply(applicationName: String, time: DateTime, state: HealthState, details: String, checks: Seq[(String,HealthInfo)] = Seq.empty): InternalContainerHealth = InternalContainerHealth ( applicationName, time, state, details, checks = for ((n,i) <- checks) yield NamedHealthInfo(n, i) ) } private [health] case class NamedHealthInfo(name: String, state: HealthState = OK, details: String, extra: Option[AnyRef] = None, checks: Seq[NamedHealthInfo] = List.empty) private [health] object NamedHealthInfo { def apply(name: String, info: HealthInfo): NamedHealthInfo = NamedHealthInfo( name, info.state, info.details, info.extra, checks = for ((n,i) <- info.checks) yield NamedHealthInfo(n, i) ) }
ListnPlay/RiverSong
src/main/scala/com/featurefm/riversong/health/ContainerHealth.scala
Scala
mit
1,311
import quoted.* import scala.quoted.staging.* object Test { given Compiler = Compiler.make(getClass.getClassLoader) def main(args: Array[String]): Unit = withQuotes { val q = '{ (q: Quotes) ?=> '{3} } println(q.show) } }
dotty-staging/dotty
tests/run-staging/quote-nested-1.scala
Scala
apache-2.0
236
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalactic import org.scalatest._ class DecidersSpec extends FunSpec with Explicitly with StringNormalizations { describe("The 'decided by' syntax") { it("should enable users to explicitly choose an Equality for a === use") { assert(3 === 3) assert(3 !== 4) implicit val e = new Equality[Int] { def areEqual(a: Int, b: Any): Boolean = a != b } assert(3 !== 3) assert(3 === 4) // And now with "decided by" to go back to defaultEquality assert((3 === 3) (decided by defaultEquality)) assert(!(3 === 4) (decided by defaultEquality)) assert((3 !== 4) (decided by defaultEquality)) assert(!(3 !== 3) (decided by defaultEquality)) } } describe("The 'after being' syntax") { it("should enable users to explicitly choose a Normalization for a === use") { assert("hello" !== "HELLO") assert(("hello" === "HELLo") (after being lowerCased)) assert("HELLO" !== "hello") assert(("HELLO" === "hello") (after being lowerCased)) assert("HeLlO" !== "hElLo") assert(("HeLlO" === "hElLo") (after being lowerCased)) assert(("hello" !== "Helloooo") (after being lowerCased)) } it("should enable users to explicitly build a Normalization for a === use by composing with 'and', with or without parens") { assert("hello" !== "HELLO") assert(("hello" === " HELLo ") (after being (lowerCased and trimmed))) assert("HELLO" !== "hello") assert((" HELLO " === "hello") (after being (lowerCased and trimmed))) assert(" HeLlO" !== "HeLlO\\n") assert((" HeLlO" === "HeLlO\\n") (after being (lowerCased and trimmed))) assert("hello" !== "HELLO") assert(("hello" === " HELLo ") (after being lowerCased and trimmed)) assert("HELLO" !== "hello") assert((" HELLO " === "hello") (after being lowerCased and trimmed)) assert(" HeLlO" !== "HeLlO\\n") assert((" HeLlO" === "HeLlO\\n") (after being lowerCased and trimmed)) } it("should enable users to explicitly specify an equality and one or more normalizations") { implicit val e = new Equality[String] { def areEqual(a: String, b: Any): Boolean = a != b } assert(!("hello" === "HELLo") (after being lowerCased)) assert(("hello" === "HELLo") (decided by defaultEquality afterBeing lowerCased)) assert(!("HELLO" === "hello") (after being lowerCased)) assert(("HELLO" === "hello") (decided by defaultEquality afterBeing lowerCased)) assert(!("HeLlO" === "hElLo") (after being lowerCased)) assert(("HeLlO" === "hElLo") (decided by defaultEquality afterBeing lowerCased)) assert(!("hello" !== "Helloooo") (after being lowerCased)) assert(("hello" !== "Helloooo") (decided by defaultEquality afterBeing lowerCased)) assert(!("hello" === " HELLo ") (after being (lowerCased and trimmed))) assert(("hello" === " HELLo ") (decided by defaultEquality afterBeing (lowerCased and trimmed))) assert(!(" HELLO " === "hello") (after being (lowerCased and trimmed))) assert((" HELLO " === "hello") (decided by defaultEquality afterBeing (lowerCased and trimmed))) assert(!(" HeLlO" === "HeLlO\\n") (after being (lowerCased and trimmed))) assert((" HeLlO" === 
"HeLlO\\n") (decided by defaultEquality afterBeing (lowerCased and trimmed))) assert(!("hello" === " HELLo ") (after being lowerCased and trimmed)) assert(("hello" === " HELLo ") (decided by defaultEquality afterBeing lowerCased and trimmed)) assert(!(" HELLO " === "hello") (after being lowerCased and trimmed)) assert((" HELLO " === "hello") (decided by defaultEquality afterBeing lowerCased and trimmed)) assert(!(" HeLlO" === "HeLlO\\n") (after being lowerCased and trimmed)) assert((" HeLlO" === "HeLlO\\n") (decided by defaultEquality afterBeing lowerCased and trimmed)) } } }
cheeseng/scalatest
scalactic-test/src/test/scala/org/scalactic/DecidersSpec.scala
Scala
apache-2.0
4,561
package examples import models.dataframe.DataFrame object ExampleTitanic { // Data from https://www.kaggle.com/c/titanic/data import models.dataframe._ // Reading a plain text file with passenger data (tab separated) val passenger_data: DataFrame = DataFrame("data/plain_text/input/passenger.tsv") // Filter the rows corresponding to male passengers (SQL filter) val male_passengers: DataFrame = passenger_data.filter("Sex", "male") // Export the filtered data male_passengers.toText("data/plain_text/output/passenger_male.tsv") // generate a new dataTable with only 3 fields of the original table (SQL projection) val passenger_data_projected: DataFrame = passenger_data.project("Age", "PassengerId", "Sex") passenger_data_projected.toText("data/plain_text/output/passenger_projected.tsv") //Reading a plain text file with survival data (tab separated) val gender_model_data: DataFrame = DataFrame("data/plain_text/input/gendermodel.tsv") // Join the passenger data with survival data // by PasssengerId // We perform a inner join (SQL inner join) only rows with same value by join field val innerjoin_data = passenger_data.join(gender_model_data, "PassengerId", "PassengerId2") innerjoin_data.toText("data/plain_text/output/passenger_inner_join_survival.tsv") // Join the passenger data with survival data // by PasssengerId // We perform a left join (SQL inner join) all rows in left dataFrame independently if they are in the right dataFrame val outer_join_data = passenger_data.join_left(gender_model_data, "PassengerId", "PassengerId2") outer_join_data.toText("data/plain_text/output/passenger_outer_join_survival.tsv") } object ExampleDrugBank { // Data from http://www.drugbank.ca/downloads#structures import models.dataframe._ // Reading approved drugs in DrugBank (using RDKit libraries) val approved_drugs: DataFrame = DataFrame("data/drugbank/input/approved.sdf") // We compute MW and LogP for the approved drugs in DrugBank val aproved_drugs_mw_logp = approved_drugs .addMW .addLogP //We export the DataFrame with new fields // to SDF aproved_drugs_mw_logp.toSDF("data/drugbank/output/approved_mw_logp.sdf") // to TSV aproved_drugs_mw_logp.toText("data/drugbank/output/approved_mw_logp.tsv") }
OriolLopezMassaguer/DataFrame
src/main/scala/examples/Examples.scala
Scala
gpl-3.0
2,308
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package spark import org.scalatest.Suite import org.scalatest.BeforeAndAfterAll /** Shares a local `SparkContext` between all tests in a suite and closes it at the end */ trait SharedSparkContext extends BeforeAndAfterAll { self: Suite => @transient private var _sc: SparkContext = _ def sc: SparkContext = _sc override def beforeAll() { _sc = new SparkContext("local", "test") super.beforeAll() } override def afterAll() { if (_sc != null) { LocalSparkContext.stop(_sc) _sc = null } super.afterAll() } }
wgpshashank/spark
core/src/test/scala/spark/SharedSparkContext.scala
Scala
apache-2.0
1,357
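A hypothetical suite showing how the trait above is mixed in; the test body is illustrative only:

import org.scalatest.FunSuite
import spark.SharedSparkContext

// `sc` is created once in beforeAll and stopped in afterAll by SharedSparkContext.
class SharedContextExampleSuite extends FunSuite with SharedSparkContext {
  test("a simple job on the shared context") {
    assert(sc.parallelize(1 to 100).count() == 100)
  }
}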
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.expressions import java.sql.{Date, Time, Timestamp} import org.apache.flink.api.common.typeinfo.BasicTypeInfo._ import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.java.typeutils.RowTypeInfo import org.apache.flink.types.Row import org.apache.flink.table.api.{Types, ValidationException} import org.apache.flink.table.api.java.utils.UserDefinedScalarFunctions.{JavaFunc0, JavaFunc1, JavaFunc2, JavaFunc3} import org.apache.flink.table.api.scala._ import org.apache.flink.table.expressions.utils._ import org.apache.flink.table.functions.ScalarFunction import org.junit.Test class UserDefinedScalarFunctionTest extends ExpressionTestBase { @Test def testParameters(): Unit = { testAllApis( Func0('f0), "Func0(f0)", "Func0(f0)", "42") testAllApis( Func1('f0), "Func1(f0)", "Func1(f0)", "43") testAllApis( Func2('f0, 'f1, 'f3), "Func2(f0, f1, f3)", "Func2(f0, f1, f3)", "42 and Test and SimplePojo(Bob,36)") testAllApis( Func0(123), "Func0(123)", "Func0(123)", "123") testAllApis( Func6('f4, 'f5, 'f6), "Func6(f4, f5, f6)", "Func6(f4, f5, f6)", "(1990-10-14,12:10:10,1990-10-14 12:10:10.0)") } @Test def testNullableParameters(): Unit = { testAllApis( Func3(Null(INT_TYPE_INFO), Null(STRING_TYPE_INFO)), "Func3(Null(INT), Null(STRING))", "Func3(NULL, NULL)", "null and null") testAllApis( Func3(Null(INT_TYPE_INFO), "Test"), "Func3(Null(INT), 'Test')", "Func3(NULL, 'Test')", "null and Test") testAllApis( Func3(42, Null(STRING_TYPE_INFO)), "Func3(42, Null(STRING))", "Func3(42, NULL)", "42 and null") testAllApis( Func0(Null(INT_TYPE_INFO)), "Func0(Null(INT))", "Func0(NULL)", "-1") } @Test def testResults(): Unit = { testAllApis( Func4(), "Func4()", "Func4()", "null") testAllApis( Func5(), "Func5()", "Func5()", "-1") } @Test def testNesting(): Unit = { testAllApis( Func0(Func0('f0)), "Func0(Func0(f0))", "Func0(Func0(f0))", "42") testAllApis( Func0(Func0('f0)), "Func0(Func0(f0))", "Func0(Func0(f0))", "42") testAllApis( Func7(Func7(Func7(1, 1), Func7(1, 1)), Func7(Func7(1, 1), Func7(1, 1))), "Func7(Func7(Func7(1, 1), Func7(1, 1)), Func7(Func7(1, 1), Func7(1, 1)))", "Func7(Func7(Func7(1, 1), Func7(1, 1)), Func7(Func7(1, 1), Func7(1, 1)))", "8") } @Test def testOverloadedParameters(): Unit = { testAllApis( Func8(1), "Func8(1)", "Func8(1)", "a") testAllApis( Func8(1, 1), "Func8(1, 1)", "Func8(1, 1)", "b") testAllApis( Func8("a", "a"), "Func8('a', 'a')", "Func8('a', 'a')", "c") } @Test def testTimePointsOnPrimitives(): Unit = { testAllApis( Func9('f4, 'f5, 'f6), "Func9(f4, f5, f6)", "Func9(f4, f5, f6)", "7591 and 43810000 and 655906210000") testAllApis( Func10('f6), "Func10(f6)", "Func10(f6)", "1990-10-14 12:10:10.0") } @Test def testTimeIntervalsOnPrimitives(): Unit = { testAllApis( 
Func11('f7, 'f8), "Func11(f7, f8)", "Func11(f7, f8)", "12 and 1000") testAllApis( Func12('f8), "Func12(f8)", "Func12(f8)", "+0 00:00:01.000") } @Test def testVariableArgs(): Unit = { testAllApis( Func14(1, 2, 3, 4), "Func14(1, 2, 3, 4)", "Func14(1, 2, 3, 4)", "10") // Test for empty arguments testAllApis( Func14(), "Func14()", "Func14()", "0") // Test for override testAllApis( Func15("Hello"), "Func15('Hello')", "Func15('Hello')", "Hello" ) testAllApis( Func15('f1), "Func15(f1)", "Func15(f1)", "Test" ) testAllApis( Func15("Hello", 1, 2, 3), "Func15('Hello', 1, 2, 3)", "Func15('Hello', 1, 2, 3)", "Hello3" ) testAllApis( Func16('f9), "Func16(f9)", "Func16(f9)", "Hello, World" ) try { testAllApis( Func17("Hello", "World"), "Func17('Hello', 'World')", "Func17('Hello', 'World')", "Hello, World" ) throw new RuntimeException("Shouldn't be reached here!") } catch { case ex: ValidationException => // ok } val JavaFunc2 = new JavaFunc2 testAllApis( JavaFunc2("Hi", 1, 3, 5, 7), "JavaFunc2('Hi', 1, 3, 5, 7)", "JavaFunc2('Hi', 1, 3, 5, 7)", "Hi105") // test overloading val JavaFunc3 = new JavaFunc3 testAllApis( JavaFunc3("Hi"), "JavaFunc3('Hi')", "JavaFunc3('Hi')", "Hi") testAllApis( JavaFunc3('f1), "JavaFunc3(f1)", "JavaFunc3(f1)", "Test") } @Test def testJavaBoxedPrimitives(): Unit = { val JavaFunc0 = new JavaFunc0() val JavaFunc1 = new JavaFunc1() testAllApis( JavaFunc0('f8), "JavaFunc0(f8)", "JavaFunc0(f8)", "1001" ) testTableApi( JavaFunc0(1000L), "JavaFunc0(1000L)", "1001" ) testAllApis( JavaFunc1('f4, 'f5, 'f6), "JavaFunc1(f4, f5, f6)", "JavaFunc1(f4, f5, f6)", "7591 and 43810000 and 655906210000") testAllApis( JavaFunc1(Null(Types.TIME), 15, Null(Types.TIMESTAMP)), "JavaFunc1(Null(TIME), 15, Null(TIMESTAMP))", "JavaFunc1(NULL, 15, NULL)", "null and 15 and null") } @Test def testRichFunctions(): Unit = { val richFunc0 = new RichFunc0 val richFunc1 = new RichFunc1 val richFunc2 = new RichFunc2 testAllApis( richFunc0('f0), "RichFunc0(f0)", "RichFunc0(f0)", "43") testAllApis( richFunc1('f0), "RichFunc1(f0)", "RichFunc1(f0)", "42") testAllApis( richFunc2('f1), "RichFunc2(f1)", "RichFunc2(f1)", "#Test") } // ---------------------------------------------------------------------------------------------- override def testData: Any = { val testData = new Row(10) testData.setField(0, 42) testData.setField(1, "Test") testData.setField(2, null) testData.setField(3, SimplePojo("Bob", 36)) testData.setField(4, Date.valueOf("1990-10-14")) testData.setField(5, Time.valueOf("12:10:10")) testData.setField(6, Timestamp.valueOf("1990-10-14 12:10:10")) testData.setField(7, 12) testData.setField(8, 1000L) testData.setField(9, Seq("Hello", "World")) testData } override def typeInfo: TypeInformation[Any] = { new RowTypeInfo( Types.INT, Types.STRING, Types.BOOLEAN, TypeInformation.of(classOf[SimplePojo]), Types.DATE, Types.TIME, Types.TIMESTAMP, Types.INTERVAL_MONTHS, Types.INTERVAL_MILLIS, TypeInformation.of(classOf[Seq[String]]) ).asInstanceOf[TypeInformation[Any]] } override def functions: Map[String, ScalarFunction] = Map( "Func0" -> Func0, "Func1" -> Func1, "Func2" -> Func2, "Func3" -> Func3, "Func4" -> Func4, "Func5" -> Func5, "Func6" -> Func6, "Func7" -> Func7, "Func8" -> Func8, "Func9" -> Func9, "Func10" -> Func10, "Func11" -> Func11, "Func12" -> Func12, "Func14" -> Func14, "Func15" -> Func15, "Func16" -> Func16, "Func17" -> Func17, "JavaFunc0" -> new JavaFunc0, "JavaFunc1" -> new JavaFunc1, "JavaFunc2" -> new JavaFunc2, "JavaFunc3" -> new JavaFunc3, "RichFunc0" -> new RichFunc0, "RichFunc1" -> new RichFunc1, 
"RichFunc2" -> new RichFunc2 ) }
DieBauer/flink
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/UserDefinedScalarFunctionTest.scala
Scala
apache-2.0
8,663
package eventstore package operations import OperationError._ import Inspection.Decision._ private[eventstore] case class TransactionWriteInspection(out: TransactionWrite) extends ErrorInspection[TransactionWriteCompleted, OperationError] { def decision(error: OperationError) = { error match { case PrepareTimeout => Retry case CommitTimeout => Retry case ForwardTimeout => Retry case WrongExpectedVersion => Unexpected case StreamDeleted => Unexpected case InvalidTransaction => Unexpected case AccessDenied => Fail(new AccessDeniedException(s"Write access denied")) } } }
pawelkaczor/EventStore.JVM
src/main/scala/eventstore/operations/TransactionWriteInspection.scala
Scala
bsd-3-clause
671
/** * The MIT License (MIT) * <p/> * Copyright (c) 2016 ScalateKids * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * <p/> * @author Scalatekids * @version 1.0 * @since 1.0 */ package com.actorbase.actorsystem.messages.ClientActorMessages import spray.json._ import spray.json.DefaultJsonProtocol._ final case class ListTupleResponse(tuples: List[Map[String, List[String]]]) case object ListTupleResponse { implicit val goJson = jsonFormat1(ListTupleResponse.apply) } /** * A message for list the responses * @param list the list that contain responses */ final case class ListResponse(list: List[String]) case object ListResponse { implicit val goJson = jsonFormat1(ListResponse.apply) } /** * Message that represent a server response * @param response the response of the server in byteArray */ final case class Response(response: Any) case object Response { implicit object AnyJsonFormat extends JsonFormat[Any] { def write(x: Any) = x match { case n: Int => JsNumber(n) case d: Double => JsNumber(d) case i: BigInt => JsNumber(i) case l: Long => JsNumber(l) case s: String => JsString(s) case x: Seq[_] => seqFormat[Any].write(x) case m: Map[String, _] => mapFormat[String, Any].write(m) case b: Boolean if b == true => JsTrue case b: Boolean if b == false => JsFalse case a: Array[Byte] => arrayFormat[Byte].write(a) case x => serializationError("Do not understand object of type " + x.getClass.getName) } def read(value: JsValue) = value match { case JsNumber(n) => n.intValue() case JsString(s) => s case a: JsArray => listFormat[Any].read(value) case o: JsObject => mapFormat[String, Any].read(value) case JsTrue => true case JsFalse => false case x => deserializationError("Do not understand how to deserialize " + x) } } implicit val goJson = jsonFormat1(Response.apply) } /** * Message with a map of structured responses * @param collection the collection of the map responses * @param map the map with the responses */ final case class MapResponse(owner: String, collectionName: String, contributors: Map[String, Boolean], data: Map[String, Any]) object MapResponse { implicit object AnyJsonFormat extends JsonFormat[Any] { def write(x: Any) = x match { case n: Int => JsNumber(n) case d: Double => JsNumber(d) case i: BigInt => JsNumber(i) case l: Long => JsNumber(l) case s: String => JsString(s) case x: Seq[_] => seqFormat[Any].write(x) case m: Map[String, _] => mapFormat[String, Any].write(m) case b: Boolean if b == true => JsTrue case b: Boolean if b == false => JsFalse case a: Array[Byte] => 
arrayFormat[Byte].write(a) case x => serializationError("Do not understand object of type " + x.getClass.getName) } def read(value: JsValue) = value match { case JsNumber(n) => n.intValue() case JsString(s) => s case a: JsArray => listFormat[Any].read(value) case o: JsObject => mapFormat[String, Any].read(value) case JsTrue => true case JsFalse => false case x => deserializationError("Do not understand how to deserialize " + x) } } implicit val goJson = jsonFormat4(MapResponse.apply) }
ScalateKids/Actorbase-Server
src/main/scala/com/actorbase/actorsystem/messages/ClientActorMessages.scala
Scala
mit
4,382
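A hedged sketch of serialising the messages above with the spray-json formats their companions define; the payload values are illustrative:

import spray.json._
import com.actorbase.actorsystem.messages.ClientActorMessages._

// The companion objects expose the implicit formats (goJson), so toJson resolves them.
val listJson = ListResponse(List("users", "orders")).toJson.compactPrint
val anyJson  = Response(Map("count" -> 42, "active" -> true)).toJson.compactPrint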
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.k8s.integrationtest private[spark] trait RTestsSuite { k8sSuite: KubernetesSuite => import RTestsSuite._ import KubernetesSuite.{k8sTestTag, rTestTag} test("Run SparkR on simple dataframe.R example", k8sTestTag, rTestTag) { sparkAppConf.set("spark.kubernetes.container.image", rImage) runSparkApplicationAndVerifyCompletion( appResource = SPARK_R_DATAFRAME_TEST, mainClass = "", expectedLogOnCompletion = Seq("name: string (nullable = true)", "1 Justin"), appArgs = Array.empty[String], driverPodChecker = doBasicDriverRPodCheck, executorPodChecker = doBasicExecutorRPodCheck, appLocator = appLocator, isJVM = false) } } private[spark] object RTestsSuite { val CONTAINER_LOCAL_SPARKR: String = "local:///opt/spark/examples/src/main/r/" val SPARK_R_DATAFRAME_TEST: String = CONTAINER_LOCAL_SPARKR + "dataframe.R" }
dbtsai/spark
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/RTestsSuite.scala
Scala
apache-2.0
1,719
package org.datacleaner.visualization import org.datacleaner.result.html.HtmlRenderingContext import org.datacleaner.result.html.HeadElement import scala.collection.mutable.Map import scala.collection.mutable.ListBuffer import java.awt.Color /** * Head element that writes a script specific to the rendering of a single result */ class DensityAnalyzerChartScriptHeadElement(result: DensityAnalyzerResult, elementId: String) extends HeadElement { val series: Map[String, ListBuffer[(Int, Int)]] = Map[String, ListBuffer[(Int, Int)]]().withDefault(rgbHex => { val list: ListBuffer[(Int, Int)] = new ListBuffer[(Int, Int)]() series.put(rgbHex, list) list }); override def toHtml(context: HtmlRenderingContext): String = { val annotations = result.getRowAnnotations val maxRowsAnnotation = annotations.values.reduce((a, b) => if (a.getRowCount() > b.getRowCount()) a else b) val maxRows = maxRowsAnnotation.getRowCount() val paintScale = DensityAnalyzerColors.getPaintScale(maxRows); annotations.foreach(entry => { val point = entry._1 val z = entry._2.getRowCount() val paint = paintScale.getPaint(z) val color = paint.asInstanceOf[Color] val rgbHex = DensityAnalyzerColors.toHexString(color) val list = series(rgbHex) list += point }) return """<script type="text/javascript"> //<![CDATA[ var data = [ """ + series.map(entry => { val rgbHex = entry._1 val list = entry._2; """{ data: [""" + list.map(coor => "[" + coor._1 + "," + coor._2 + "]").mkString(",") + """], color: "#""" + rgbHex + """" }""" }).mkString(",") + """ ]; draw_scatter_chart('""" + elementId + """', data, 2); //]]> </script> """ } }
anandswarupv/DataCleaner
components/visualization/src/main/scala/org/datacleaner/visualization/DensityAnalyzerChartScriptHeadElement.scala
Scala
lgpl-3.0
1,799
/* * Copyright (C) 2005, The OpenURP Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.openurp.code.geo.model import org.beangle.data.model.annotation.code import org.openurp.code.CodeBean import org.beangle.commons.collection.Collections /** * 国家地区 * 参见国家推荐标准 GB/T 2659-2000 * @see http://en.wikipedia.org/wiki/ISO_3166-1 * @see http://wenku.baidu.com/view/bd105c235901020207409cd1.html */ @code("nation") class Country extends CodeBean { var alpha3Code: String = _ var alpha2Code: String = _ var shortName: String = _ } /** * 行政区划 * 参见国家推荐标准 GB/T 2260-1999 * @see http://www.gfjl.org/thread-83266-1-1.html */ @code("nation") class Division extends CodeBean { /** 父级 */ var parent: Division = _ var children = Collections.newBuffer[Division] }
openurp/api
code/src/main/scala/org/openurp/code/geo/model/nation.scala
Scala
lgpl-3.0
1,468
package org.scalameta.paradise import scala.tools.nsc.{Global, Phase, SubComponent} import scala.tools.nsc.plugins.{Plugin => NscPlugin, PluginComponent => NscPluginComponent} import scala.collection.{mutable, immutable} import org.scalameta.paradise.converters.Converter import org.scalameta.paradise.reflect.ReflectToolkit import org.scalameta.paradise.parser.HijackSyntaxAnalyzer import org.scalameta.paradise.typechecker.HijackAnalyzer import org.scalameta.paradise.typechecker.AnalyzerPlugins class Plugin(val global: Global) extends NscPlugin with HijackSyntaxAnalyzer with HijackAnalyzer with AnalyzerPlugins { val name = "macroparadise" val description = "Empowers production Scala compiler with latest macro developments" val components = Nil hijackSyntaxAnalyzer() val newAnalyzer = hijackAnalyzer() newAnalyzer.addAnalyzerPlugin(AnalyzerPlugin) newAnalyzer.addMacroPlugin(MacroPlugin) }
scalameta/paradise
plugin/src/main/scala/org/scalameta/paradise/Plugin.scala
Scala
bsd-3-clause
931
package ch.epfl.bluebrain.nexus.iam.types import java.time.Instant import ch.epfl.bluebrain.nexus.iam.types.Identity.Subject import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri object ResourceMetadata { /** * Constructs a [[ResourceF]] where the value is of type Unit * * @param id the identifier of the resource * @param rev the revision of the resource * @param types the types of the resource * @param createdAt the instant when the resource was created * @param createdBy the subject that created the resource * @param updatedAt the instant when the resource was updated * @param updatedBy the subject that updated the resource */ def apply( id: AbsoluteIri, rev: Long, types: Set[AbsoluteIri], createdAt: Instant, createdBy: Subject, updatedAt: Instant, updatedBy: Subject ): ResourceMetadata = ResourceF.unit(id, rev, types, createdAt, createdBy, updatedAt, updatedBy) }
hygt/nexus-iam
src/main/scala/ch/epfl/bluebrain/nexus/iam/types/ResourceMetadata.scala
Scala
apache-2.0
996
package org.littlewings.infinispan.mapreduce import scala.collection.JavaConverters._ import org.infinispan.distexec.mapreduce.{Collator, Collector, Mapper, Reducer} @SerialVersionUID(1L) class ReplicatedCacheMapper extends Mapper[String, String, String, Set[String]] { override def map(key: String, value: String, collector: Collector[String, Set[String]]): Unit = collector.emit(key, Set(s"Mapper-${Thread.currentThread.getName}")) } @SerialVersionUID(1L) class ReplicatedCacheReducer extends Reducer[String, Set[String]] { override def reduce(key: String, iter: java.util.Iterator[Set[String]]): Set[String] = iter.asScala.foldLeft(Set.empty[String]) { (acc, c) => acc ++ c } + s"Reducer-${Thread.currentThread.getName}" }
kazuhira-r/infinispan-examples
infinispan-mapreduce-clustering-type/src/main/scala/org/littlewings/infinispan/mapreduce/ReplicatedCacheMapperReducer.scala
Scala
mit
742
package me.breidenbach.asyncmailer import org.springframework.stereotype.Service import scala.concurrent.duration.FiniteDuration /** * Copyright © Kevin E. Breidenbach, 5/29/15. */ // A wrapper to make it easy for Spring systems to use it @Service("amMailSupervisor") class MailerSupervisorControl { val supervisor = MailerSupervisor def start(mailerCount: Int, waitTimeout: FiniteDuration): Unit = { supervisor.start(mailerCount, waitTimeout) } // Using polymorphic approach as poor old Java clients can use default parameters yet def sendEmail(emailAddress: String): Boolean = { sendEmail(emailAddress, async = true) } def sendEmail(emailAddress: String, async: Boolean): Boolean = { supervisor.sendEmail(emailAddress, async) } def isStarted = supervisor.isStarted // use this for "destroy-method" in spring bean configuration def stop() = supervisor.stop() }
kbreidenbach/akka-emailer
src/main/scala/me/breidenbach/asyncmailer/MailerSupervisorControl.scala
Scala
mit
907
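A hypothetical non-Spring caller of the control above; the mailer count, timeout and address are illustrative assumptions:

import scala.concurrent.duration._
import me.breidenbach.asyncmailer.MailerSupervisorControl

val mailControl = new MailerSupervisorControl
mailControl.start(mailerCount = 4, waitTimeout = 5.seconds)

mailControl.sendEmail("user@example.com")                     // asynchronous by default
val delivered = mailControl.sendEmail("user@example.com", async = false)

if (mailControl.isStarted) mailControl.stop()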
package ctlmc.bddgraph import ctlmc.spec._ import ctlmc._ class GraphFactorySpec extends UnitSpec { test("Creation") { val factory = new GraphFactory() } } class GraphSpec extends UnitSpec { val factory = new GraphFactory() factory.setParameters(Array( ("v1", (Array("F", "T").zipWithIndex.toMap, 0)), ("v2", (Array("F", "T").zipWithIndex.toMap, 1)), ("v3", (Array("F", "T").zipWithIndex.toMap, 2)), ("v4", (Array("F", "T").zipWithIndex.toMap, 3)), ("v5", (Array("F", "T").zipWithIndex.toMap, 4)) ).toMap) val params = Array[Int](0, 0, 0, 0, 0) test("Single var State comparison, positive") { assert(factory.createState(params).set(1, 1) == factory.createState(params).set(1, 1)) } test("Single var State comparison, negative") { assert(factory.createState(params).set(1, 1) != factory.createState(params).set(2, 1)) } test("Full StateSet comparison") { assert(factory.createFullStateSet() == factory.createFullStateSet()) } test("Empty StateSet comparison") { assert(factory.createEmptyStateSet() == factory.createEmptyStateSet()) } test("Custom StateSet comparison, positive 1") { val s0 = factory.createState(params).set(1, 1) assert(factory.createStateSet(s0) == factory.createStateSet(s0)) } test("Custom StateSet comparison, positive 2") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) assert(factory.createStateSet(Array(s0, s1)) == factory.createStateSet(Array(s1, s0))) } test("Custom StateSet comparison, negative") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) assert(factory.createStateSet(s0) != factory.createStateSet(s1)) } test("Graph size") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) val s2 = factory.createState(params).set(2, 1) val graph = factory.createGraph(Array( factory.createEdge(s0, s1), factory.createEdge(s1, s2), factory.createEdge(s2, s0) )) assert(graph.countEdges == 3) } test("Preimage, segment") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) val graph = factory.createGraph(Array( factory.createEdge(s0, s1) )) val set = factory.createStateSet(s1) val pre = graph.preimage(set) assert(pre == factory.createStateSet(s0)) } test("Preimage, line") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) val s2 = factory.createState(params).set(2, 1) val graph = factory.createGraph(Array( factory.createEdge(s0, s1), factory.createEdge(s1, s2), factory.createEdge(s2, s0) )) val set = factory.createStateSet(s2) val pre = graph.preimage(set) assert(pre == factory.createStateSet(s1)) } test("Preimage, triangle") { val s0 = factory.createState(params).set(0, 1) val s1 = factory.createState(params).set(1, 1) val s2 = factory.createState(params).set(2, 1) val graph = factory.createGraph(Array( factory.createEdge(s0, s1), factory.createEdge(s1, s2), factory.createEdge(s2, s0) )) val set = factory.createStateSet(s0) val pre = graph.preimage(set) assert(pre == factory.createStateSet(s2)) } }
fpoli/ctlmc
src/test/scala/bddgraph/GraphSpec.scala
Scala
gpl-3.0
3,242
/* * Copyright (c) 2013 University of Massachusetts Amherst * Licensed under the Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0 */ package edu.umass.cs.iesl.bibmogrify.pipeline import com.typesafe.scalalogging.slf4j.Logging import collection.GenTraversableOnce /** * @author <a href="mailto:[email protected]">David Soergel</a> * @version $Id$ */ trait Transformer[S, +T] extends ((S) => GenTraversableOnce[T]) { def metadata: Option[TransformerMetadata] = None // introspecting the types is a mess; just hardcode the strings val fromType: String // S val toType: String // T } trait TransformerMetadata // for tracking provenance, headers, etc. case class StringMetadata(value: String) extends TransformerMetadata { override def toString = value } //trait Source[T] extends TraversableOnce[T] trait Sink[T] { def put(c: T) def putMetadata(m: Option[TransformerMetadata]) def close() } object Pump extends Logging { def apply[T](source: TraversableOnce[T], sink: Sink[T]) { source.foreach(sink.put(_)) } /* def apply[T](source: TraversableOnce[T], sink: Sink[T], before:Option[T], between: Option[T], after:Option[T]) { //val x = source.toIterable.par //toSeq // no clue why toSeq is needed here, but otherwise the map below doesn't work ?? BAD memory use. even // toIterable makes a Stream. //logger.warn("Pumping...") before.map(sink.put(_)) var first = true source.foreach(x=>{ if (!first) { between.map(sink.put(_)) first = false } sink.put(x) }) after.map(sink.put(_)) //logger.warn("Done Pumping!") } */ } /* object ParPump extends Logging { def apply[T](source: TraversableOnce[T], sink: Sink[T]) { source.toIterator.sliding(100,100).foreach( i=>i.par.foreach(sink.put(_)) ) //val x = source.toIterable.par //toSeq // no clue why toSeq is needed here, but otherwise the map below doesn't work ?? BAD memory use. even // toIterable makes a Stream. //logger.warn("Pumping...") //source.foreach(sink.put(_)) //logger.warn("Done Pumping!") } } */ class CompositeTransformer[T, U, V](first: Transformer[T, U], second: Transformer[U, V]) extends Transformer[T, V] with Logging { //def apply(t: T) = first(t).flatMap(second) // this is strict and memoizes? def apply(t: T): TraversableOnce[V] = new Traversable[V] { // ** side effect error handling for now; would prefer an error monad def foreach[Z](f: (V) => Z) { try { { val a = first(t) a.foreach(y => { try { { val b = second(y) b.foreach(f) } } catch { case e => logger.error("Error", e) } }) } } catch { case e => logger.error("Error", e) } } } //XmlUtils.generatorToTraversable(second).foreach(first(t)) override def metadata = { (first.metadata, second.metadata) match { case (None, None) => None case (a, b) => { Some(new CompositeMetadata(a, b)) } } } val fromType = "T" val toType = "V" } class CompositeMetadata(a: Option[TransformerMetadata], b: Option[TransformerMetadata]) extends TransformerMetadata { override def toString = a.map(_.toString).getOrElse("") + b.map(_.toString).getOrElse("") }
iesl/bibmogrify
src/main/scala/edu/umass/cs/iesl/bibmogrify/pipeline/PipelineTraits.scala
Scala
apache-2.0
3,380
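A hedged sketch of composing two toy transformers with CompositeTransformer and draining the result through Pump; the concrete transformers and sink here are illustrative only, not part of the library:

import edu.umass.cs.iesl.bibmogrify.pipeline._

// Two minimal transformers: split a line into words, then map each word to its length.
val splitWords = new Transformer[String, String] {
  def apply(line: String) = line.split("\\s+").toSeq
  val fromType = "String"
  val toType = "String"
}
val wordLengths = new Transformer[String, Int] {
  def apply(word: String) = Seq(word.length)
  val fromType = "String"
  val toType = "Int"
}

val pipeline = new CompositeTransformer(splitWords, wordLengths)

// A throwaway sink that just prints what it receives.
val printSink = new Sink[Int] {
  def put(c: Int) = println(c)
  def putMetadata(m: Option[TransformerMetadata]) = ()
  def close() = ()
}

printSink.putMetadata(pipeline.metadata)
Pump(Seq("composite transformers in a pipeline").flatMap(pipeline), printSink)
printSink.close()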
package xi.examples import org.apache.spark.rdd.RDD import org.apache.spark.{SparkConf, SparkContext} object Ex3 { def main(args: Array[String]): Unit = { val conf = new SparkConf() .setAppName("Task 3") .setMaster("local") val sc = new SparkContext(conf) val tweetsPath = args(0) val stopWordsPath = args(1) val outputDataset = args(2) val tweetsRaw: RDD[String] = sc.textFile(tweetsPath) val stopWords: Array[String] = sc.textFile(stopWordsPath) .flatMap(_.split(",")) .map(_.replaceAll("^\\\\s+", "")) .collect() // recover the city and the tweet text // city is always the first element // the text is always the penultimate element val tweets: RDD[(String, String)] = tweetsRaw.map { line => val elt = line.split("\\t") (elt.head, elt(elt.length - 2)) } val filteredTweetsText: RDD[String] = tweets.filter { ct => val city = ct._1 (city matches "San Francisco") || (city matches "Chicago") }.map { _._2 // recover the tweet text .split(" ") // split the tweet into words .filterNot(s => stopWords.contains(s.toLowerCase.replaceAll("^\\\\s+", ""))) // remove stop words .mkString(" ") } filteredTweetsText.saveAsTextFile(outputDataset) } }
SnipyJulmy/mcs_notes_and_resume
bdi/s06/lab06/src/main/scala/xi/examples/Ex3.scala
Scala
lgpl-3.0
1,300
package com.twitter.scrooge.android_generator import com.twitter.scrooge.ast.{Identifier, Enum} import com.twitter.scrooge.java_generator.TypeController class EnumConstant(val name: String, val value: Int, val last: Boolean) class EnumController(e: Enum, generator: AndroidGenerator, ns: Option[Identifier]) extends TypeController(e, generator, ns) { val constants = e.values.zipWithIndex map { case (v, i) => new EnumConstant(v.sid.name, v.value, i == e.values.size - 1) } }
thirstycrow/scrooge
scrooge-generator/src/main/scala/com/twitter/scrooge/android_generator/EnumController.scala
Scala
apache-2.0
489
/*********************************************************************** * Copyright (c) 2013-2017 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.raster.wcs import org.geotools.coverage.grid.io.AbstractGridFormat import org.geotools.coverage.grid.io.AbstractGridFormat._ import org.geotools.factory.{GeoTools, Hints} import org.geotools.parameter.{DefaultParameterDescriptorGroup, ParameterGroup} import org.opengis.coverage.grid.Format import org.opengis.parameter.GeneralParameterDescriptor class GeoMesaCoverageFormat extends AbstractGridFormat with Format { mInfo = new java.util.HashMap[String, String]() mInfo.put("name", GeoMesaCoverageFormat.CoverageFormatName) mInfo.put("description", "Tile imagery in Apache Accumulo\\u2122") mInfo.put("vendor", "GeoMesa") mInfo.put("docURL", "http://www.geomesa.org") mInfo.put("version", "1.0") private val parameterDescriptors = Array[GeneralParameterDescriptor](READ_GRIDGEOMETRY2D) private val defaultParameterGroup = new DefaultParameterDescriptorGroup(mInfo, parameterDescriptors) readParameters = new ParameterGroup(defaultParameterGroup) writeParameters = null override def getReader(source: AnyRef) = getReader(source, null) override def getReader(source: AnyRef, hints: Hints) = { source match { case path: String => new GeoMesaCoverageReader(path, hints) case unk => throw new RuntimeException("unexpected data type for reader source: " + s"${Option(unk).map(_.getClass.getName).getOrElse("null")}") } } override def accepts(input: AnyRef) = accepts(input, null) override def accepts(source: AnyRef, hints: Hints) = { source match { case string: String => string.startsWith("accumulo://") case _ => false } } override def getWriter(destination: AnyRef) = throw new UnsupportedOperationException("Unsupported") override def getWriter(destination: AnyRef, hints: Hints) = throw new UnsupportedOperationException("Unsupported") override def getDefaultImageIOWriteParameters = throw new UnsupportedOperationException("Unsupported") } object GeoMesaCoverageFormat { val CoverageFormatName = "Accumulo (Geomesa Raster)" }
ronq/geomesa
geomesa-accumulo/geomesa-accumulo-raster/src/main/scala/org/locationtech/geomesa/raster/wcs/GeoMesaCoverageFormat.scala
Scala
apache-2.0
2,537
package benchmark.unsafe import org.scalameter._ import scalajson.ast.unsafe object Generators { def jBoolean: Gen[unsafe.JBoolean] = for { size <- Gen.range("seed")(300000, 1500000, 300000) } yield { if (size % 2 == 0) unsafe.JBoolean(true) else unsafe.JBoolean(false) } def jString: Gen[unsafe.JString] = for { size <- Gen.range("seed")(300000, 1500000, 300000) } yield { unsafe.JString(size.toString) } def jNumber: Gen[unsafe.JNumber] = for { size <- Gen.range("seed")(300000, 1500000, 300000) } yield { scalajson.ast.unsafe.JNumber(size) } def jArray: Gen[unsafe.JArray] = for { size <- Gen.range("seed")(0, 10, 1) randomJValue <- jValue } yield { val array: Array[unsafe.JValue] = Array.ofDim(size) (0 until size).foreach { index => array(index) = randomJValue } scalajson.ast.unsafe.JArray(array) } def jObject: Gen[unsafe.JObject] = for { size <- Gen.range("seed")(0, 10, 1) string <- Gen.range("seed")(300000, 1500000, 300000).map { _.toString } randomJValue <- jValue } yield { val array: Array[unsafe.JField] = Array.ofDim(size) (0 until size).foreach { index => array(index) = unsafe.JField(string, randomJValue) } scalajson.ast.unsafe.JObject(array) } def jValue: Gen[unsafe.JValue] = Gen.range("JValue type")(300000, 1500000, 300000).flatMap { seed => seed % 5 match { case 0 => jBoolean.asInstanceOf[Gen[unsafe.JValue]] case 1 => jString.asInstanceOf[Gen[unsafe.JValue]] case 2 => jNumber.asInstanceOf[Gen[unsafe.JValue]] case 3 => jArray.asInstanceOf[Gen[unsafe.JValue]] case 4 => jString.asInstanceOf[Gen[unsafe.JValue]] } } }
mdedetrich/scalajson
benchmark/jvm/src/test/scala/benchmark/unsafe/Generators.scala
Scala
bsd-3-clause
1,851
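The Gen definitions above are ScalaMeter generators. As a rough, hedged sketch only (the benchmark object and the measured operation below are invented, not taken from the scalajson repo), one of them could be plugged into a ScalaMeter benchmark roughly like this:

import org.scalameter.api._

// Hypothetical benchmark; only Generators.jValue comes from the file above.
object JValueToStringBench extends Bench.LocalTime {
  performance of "unsafe.JValue" in {
    measure method "toString" in {
      using(benchmark.unsafe.Generators.jValue) in { value =>
        value.toString
      }
    }
  }
}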
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package wvlet.airframe.examples.di import wvlet.airframe.tracing.DIStats /** * To check the coverage of your design, set DIStats to your design. * * This is useful to see unused bindings in the design. */ object DI_91_Stats extends App { import wvlet.airframe._ trait A { val b = bind[B] } trait B trait C val stats = new DIStats() val d = newSilentDesign .bind[A].toSingleton .bind[B].toSingleton .bind[C].toSingleton .withStats(stats) // Set stats d.build[A] { a => // } val report = stats.coverageReportFor(d) /** * Show the design coverage and access stats. * * [coverage] design coverage: 66.7% [unused types] C [access stats] [A] init:1, inject:1 [B] init:1, inject:1 */ println(report) }
wvlet/airframe
examples/src/main/scala/wvlet/airframe/examples/di/DI_91_Stats.scala
Scala
apache-2.0
1,333
package zeroformatter package unsafe object ZeroFormatter { def serialize[T](value: T)(implicit F: Formatter[T]): Array[Byte] = { val encoder = UnsafeEncoder(new Array[Byte](F.length.getOrElse(0))) val byteSize = F.serialize(encoder, 0, value) val result = encoder.toByteArray if(result.length != byteSize) UnsafeUtil.resize(result, byteSize) else result } def deserialize[T](bytes: Array[Byte])(implicit F: Formatter[T]): T = F.deserialize(UnsafeDecoder(bytes, 0)) }
pocketberserker/scala-zero-formatter
unsafe/src/main/scala/zeroformatter/unsafe/ZeroFormatter.scala
Scala
mit
501
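Both entry points above just delegate to an implicit Formatter[T]. A hedged round-trip sketch follows; it assumes a Formatter instance for the chosen type is available in scope (how such an instance is obtained is not shown in this file) and that Formatter lives in the zeroformatter package, as its unqualified use above suggests.

import zeroformatter.Formatter
import zeroformatter.unsafe.ZeroFormatter

object RoundTripSketch {
  // Serialize and immediately deserialize a value through the unsafe codec.
  // The implicit Formatter[T] is assumed to exist; it is not provided here.
  def roundTrip[T: Formatter](value: T): T = {
    val bytes: Array[Byte] = ZeroFormatter.serialize(value)
    ZeroFormatter.deserialize[T](bytes)
  }
}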
package controllers import play.api._ import play.api.mvc._ import models.{Movie, Movies} import play.api.libs.json.Json import play.api.db.slick.DBAction object Application extends Controller { def index = Action { Ok(views.html.index()) } implicit val messageJsonWriter = Json.writes[Movie] def movies(year: Option[Int]) = DBAction { implicit rs => implicit val s = rs.dbSession val movies = year match { case Some(y) => Movies.findByYear(y) case None => Movies.list } Ok(Json.toJson(Map("movies" -> movies))) } def movie(id: Long) = DBAction { implicit rs => implicit val s = rs.dbSession val movie = Movies.get(id) Ok(Json.toJson(movie)) } def years = DBAction { implicit rs => implicit val s = rs.dbSession val years = Movies.years Ok(Json.toJson(years)) } }
rockneurotiko/madness-things
Scala/Play/movies-example/app/controllers/Application.scala
Scala
mpl-2.0
846
/* * Copyright 2007-2008 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions * and limitations under the License. */ package net.liftweb.example.snippet import _root_.net.liftweb.example.model._ import _root_.scala.xml.{NodeSeq, Text, Group, Node} import _root_.net.liftweb.http._ import _root_.net.liftweb.http.S import _root_.net.liftweb.mapper._ import _root_.net.liftweb.http.S._ import _root_.net.liftweb.http.SHtml._ import _root_.net.liftweb.util.Helpers._ import _root_.net.liftweb.util._ class CountGame extends StatefulSnippet { val dispatch: DispatchIt = { case "run" if lastGuess == number => xhtml => win(chooseTemplate("choose", "win", xhtml)) case "run" => xhtml => nextGuess(chooseTemplate("choose", "guess", xhtml)) case "count_down" => xhtml => countDown(attr("from").map(Helpers.toInt).openOr(0)) } def win(xhtml: NodeSeq) = bind("count", xhtml, "number" -> number, "count" -> count) ++ <p>Counting backward: {countDown(number)}</p> def countDown(number: Int): Node = if (number <= 0) Text("") else <xml:group>{number} <lift:count_game.count_down from={(number - 1).toString} /></xml:group> def nextGuess(xhtml: NodeSeq) = bind("count", xhtml, "input" -> text("", guess _), "last" -> lastGuess.map(v => if (v < number) v+" is low" else v+" is high"). openOr("Make first Guess")) private def guess(in: String) { count += 1 lastGuess = Full(toInt(in)) } private val number = 1 + randomInt(100) private var lastGuess: Box[Int] = Empty private var count = 0 }
andreum/liftweb
sites/example/src/main/scala/net/liftweb/example/snippet/CountGame.scala
Scala
apache-2.0
2,038
package io.scalajs.nodejs.readline

import io.scalajs.nodejs.events.IEventEmitter

import scala.scalajs.js

/**
  * Readline Interface
  * @see https://nodejs.org/api/readline.html#readline_class_interface
  * @author [email protected]
  */
@js.native
trait Interface extends IEventEmitter {

  /**
    * Closes the Interface instance, relinquishing control on the input and output streams.
    * The 'close' event will also be emitted.
    * @example rl.close()
    */
  def close(): Unit = js.native

  /**
    * Pauses the readline input stream, allowing it to be resumed later if needed.
    * <b>Note</b>: that this doesn't immediately pause the stream of events. Several events may be emitted
    * after calling pause, including line.
    * @example rl.pause()
    */
  def pause(): Unit = js.native

  /**
    * Readies readline for input from the user, putting the current setPrompt options on a new line, giving the user
    * a new spot to write. Set preserveCursor to true to prevent the cursor placement being reset to 0.
    * This will also resume the input stream used with createInterface if it has been paused.
    * If output is set to null or undefined when calling createInterface, the prompt is not written.
    * @example rl.prompt([preserveCursor])
    */
  def prompt(preserveCursor: Boolean): Unit = js.native

  /**
    * Readies readline for input from the user, putting the current setPrompt options on a new line, giving the user
    * a new spot to write. Set preserveCursor to true to prevent the cursor placement being reset to 0.
    * This will also resume the input stream used with createInterface if it has been paused.
    * If output is set to null or undefined when calling createInterface, the prompt is not written.
    * @example rl.prompt([preserveCursor])
    */
  def prompt(): Unit = js.native

  /**
    * Prepends the prompt with query and invokes callback with the user's response. Displays the query to the user,
    * and then invokes callback with the user's response after it has been typed.
    * This will also resume the input stream used with createInterface if it has been paused.
    * If output is set to null or undefined when calling createInterface, nothing is displayed.
    * @example rl.question(query, callback)
    */
  def question(query: String, callback: js.Function): Unit = js.native

  /**
    * Resumes the readline input stream.
    * @example rl.resume()
    */
  def resume(): Unit = js.native

  /**
    * Sets the prompt, for example when you run node on the command line, you see > , which is Node.js's prompt.
    * @example rl.setPrompt(prompt)
    */
  def setPrompt(prompt: String): Unit = js.native

  /**
    * Writes data to output stream, unless output is set to null or undefined when calling createInterface.
    * key is an object literal to represent a key sequence; available if the terminal is a TTY.
    * This will also resume the input stream if it has been paused.
    * @example rl.write(data[, key])
    */
  def write(data: String, key: js.Any): Unit = js.native

  /**
    * Writes data to output stream, unless output is set to null or undefined when calling createInterface.
    * key is an object literal to represent a key sequence; available if the terminal is a TTY.
    * This will also resume the input stream if it has been paused.
    * @example rl.write(data[, key])
    */
  def write(data: String): Unit = js.native

}

/**
  * Readline Interface Companion
  * @author [email protected]
  */
object Interface {

  /**
    * Readline Interface Events
    * @param readline the given [[Interface Readline Interface]]
    */
  implicit class ReadlineEvents(val readline: Interface) extends AnyVal {

    /**
      * Emitted when close() is called.
Also emitted when the input stream receives its 'end' event. * The Interface instance should be considered "finished" once this is emitted. For example, when * the input stream receives {{{ ^D }}}, respectively known as EOT. */ @inline def onClose(callback: () => Any) = readline.on("close", callback) /** * Emitted whenever the input stream receives an end of line (\\n, \\r, or \\r\\n), usually received when * the user hits enter, or return. This is a good hook to listen for user input. */ @inline def onLine(callback: String => Any) = readline.on("line", callback) /** * Emitted whenever the input stream is paused. Also emitted whenever the input stream is not paused * and receives the SIGCONT event. (See events SIGTSTP and SIGCONT) */ @inline def onPause(callback: js.Function) = readline.on("pause", callback) /** * Emitted whenever the input stream is resumed. */ @inline def onResume(callback: js.Function) = readline.on("resume", callback) /** * Emitted whenever the input stream is sent to the background with {{{ ^Z }}}, respectively known as SIGTSTP, * and then continued with fg(1). This event only emits if the stream was not paused before sending the * program to the background. */ @inline def onSIGCONT(callback: js.Function) = readline.on("SIGCONT", callback) /** * Emitted whenever the input stream receives a {{{ ^C }}}, respectively known as SIGINT. If there is no * SIGINT event listener present when the input stream receives a SIGINT, pause will be triggered. */ @inline def onSIGINT(callback: js.Function) = readline.on("SIGINT", callback) /** * Emitted whenever the input stream receives a {{{ ^Z }}}, respectively known as SIGTSTP. If there is no * SIGTSTP event listener present when the input stream receives a SIGTSTP, the program will be sent * to the background. When the program is resumed with fg, the 'pause' and SIGCONT events will be emitted. * You can use either to resume the stream. The 'pause' and SIGCONT events will not be triggered if the * stream was paused before the program was sent to the background. */ @inline def onSIGTSTP(callback: js.Function) = readline.on("SIGTSTP", callback) } }
scalajs-io/nodejs
app/common/src/main/scala/io/scalajs/nodejs/readline/Interface.scala
Scala
apache-2.0
6,151
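The facade above only declares the Interface type, its methods and its event helpers; creating the instance (readline.createInterface on the Node.js side) happens outside this file. A hedged sketch that sticks to the calls documented above, assuming an Interface named rl has already been obtained elsewhere:

import io.scalajs.nodejs.readline.Interface

object ReadlineSketch {
  // Wires a simple prompt loop onto an already-created Interface.
  def promptLoop(rl: Interface): Unit = {
    rl.setPrompt("> ")
    rl.onLine { line =>
      println(s"got: $line")
      rl.prompt()
    }
    rl.onClose(() => println("bye"))
    rl.prompt()
  }
}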
package com.github.pockethub import java.io._ import java.util.zip.GZIPOutputStream import android.util.Log /** * Created by chentao on 15/12/10. * * @author [email protected] * * ___====-_ _-====___ * _--^^^#####// \\\\#####^^^--_ * _-^##########// ( ) \\\\##########^-_ * -############// |\\^^/| \\\\############- * _/############// (@::@) \\\\############\\_ * /#############(( \\\\// ))#############\\ * -###############\\\\ (oo) //###############- * -#################\\\\ / VV \\ //#################- * -###################\\\\/ \\//###################- * _#/|##########/\\######( /\\ )######/\\##########|\\#_ * |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\| * ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| ' * ` ` ` ` / | | | | \\ ' ' ' ' * ( | | | | ) * __\\ | | | | /__ * (vvv(VVV)(VVV)vvv) * * HERE BE DRAGONS */ class RequestWriter(private val handle: File, private val version: Int) { private val TAG = "RequestWriter" def createDirectory(dir: File): Unit = { if (dir != null && !dir.exists()) { dir.mkdirs() } } def write[V >: Null](request: V): V = { createDirectory(handle.getParentFile) val dir = new RandomAccessFile(handle, "rw") val lock = dir.getChannel.lock() val result: Either[V, IOException] = try { inSafe(dir) { inSafe(lock) { val output = new ObjectOutputStream( new GZIPOutputStream(new FileOutputStream(dir.getFD), 8192) ) inSafe(output) { output.writeInt(version) output.writeObject(request) } } } Left(request) } catch { case e: IOException => Log.d(TAG, "Exception writing cache " + handle.getName, e) Right(e) } result match { case Left(r) => r case Right(e) => null } } }
JLLK/PocketHub-scala
app/src/main/scala/com/github/pockethub/RequestWriter.scala
Scala
apache-2.0
2,220
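RequestWriter.write relies on an inSafe loan helper that is neither defined nor imported in this file; it presumably lives elsewhere in the PocketHub-scala sources. A hedged sketch of a minimal helper compatible with the way it is called above (resource first, body second, resource always closed):

object ResourceSketch {
  // Loan pattern: run the body, then close/release the resource no matter what.
  // The real inSafe in the repo may differ; this is only a compatible sketch.
  def inSafe[A <: AutoCloseable, B](resource: A)(body: => B): B =
    try body finally resource.close()
}

RandomAccessFile, FileLock and ObjectOutputStream all implement AutoCloseable, so the nested inSafe calls in write release the stream, the lock and the file in inner-to-outer order.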
package scala.virtualization.lms package internal import java.io._ import scala.tools.nsc._ import scala.tools.nsc.util._ import scala.tools.nsc.reporters._ import scala.tools.nsc.io._ import scala.tools.nsc.interpreter.AbstractFileClassLoader trait ScalaCompile extends Expressions { val codegen: ScalaCodegen { val IR: ScalaCompile.this.type } var compiler: Global = _ var reporter: ConsoleReporter = _ //var output: ByteArrayOutputStream = _ def setupCompiler() = { /* output = new ByteArrayOutputStream() val writer = new PrintWriter(new OutputStreamWriter(output)) */ val settings = new Settings() val pathSeparator = System.getProperty("path.separator") settings.classpath.value = this.getClass.getClassLoader match { case ctx: java.net.URLClassLoader => ctx.getURLs.map(_.getPath).mkString(pathSeparator) case _ => System.getProperty("java.class.path") } settings.bootclasspath.value = Predef.getClass.getClassLoader match { case ctx: java.net.URLClassLoader => ctx.getURLs.map(_.getPath).mkString(pathSeparator) case _ => System.getProperty("sun.boot.class.path") } settings.encoding.value = "UTF-8" settings.outdir.value = "." settings.extdirs.value = "" //settings.verbose.value = true // -usejavacp needed on windows? reporter = new ConsoleReporter(settings, null, new PrintWriter(System.out))//writer compiler = new Global(settings, reporter) } var compileCount = 0 var dumpGeneratedCode = false def compile[A,B](f: Exp[A] => Exp[B])(implicit mA: Manifest[A], mB: Manifest[B]): A=>B = { if (this.compiler eq null) setupCompiler() val className = "staged$" + compileCount compileCount += 1 val source = new StringWriter() val writer = new PrintWriter(source) val staticData = codegen.emitSource(f, className, writer) codegen.emitDataStructures(writer) if (dumpGeneratedCode) println(source) val compiler = this.compiler val run = new compiler.Run val fileSystem = new VirtualDirectory("<vfs>", None) compiler.settings.outputDirs.setSingleOutput(fileSystem) // compiler.genJVM.outputDir = fileSystem run.compileSources(List(new util.BatchSourceFile("<stdin>", source.toString))) reporter.printSummary() if (!reporter.hasErrors) println("compilation: ok") else println("compilation: had errors") reporter.reset //output.reset val parent = this.getClass.getClassLoader val loader = new AbstractFileClassLoader(fileSystem, this.getClass.getClassLoader) val cls: Class[_] = loader.loadClass(className) val cons = cls.getConstructor(staticData.map(_._1.tp.erasure):_*) val obj: A=>B = cons.newInstance(staticData.map(_._2.asInstanceOf[AnyRef]):_*).asInstanceOf[A=>B] obj } }
afernandez90/virtualization-lms-core
src/internal/ScalaCompile.scala
Scala
bsd-3-clause
2,853
package ssscs.outputter import com.itextpdf.text._ import com.itextpdf.text.pdf.PdfWriter import java.io.{FileOutputStream, File} import java.text.SimpleDateFormat import scala.Some import ssscs.{Article, PdfUtils} class PdfOutputter extends Outputter { override protected def outputToDir(articles: IndexedSeq[Article], dir: File) { articles.foreach(a => outputSingleToDir(a, dir)) } private def outputSingleToDir(article: Article, dir: File) { article.transcript match { case Some(transcript) => { val underscorizedTitle = article.info.title.replaceAll("\\\\s", "_") val formattedDate = new SimpleDateFormat("yyyy_MM_dd").format(article.info.date) val pdfFileName = s"${formattedDate}_$underscorizedTitle.pdf" val pdfFilePath = s"${dir.getPath}/$pdfFileName" val doc = new Document() PdfWriter.getInstance(doc, new FileOutputStream(pdfFilePath)) doc.open() try { PdfUtils.addTitle(doc, article.info.title) PdfUtils.addDate(doc, article.info.date) PdfUtils.addSeparator(doc) PdfUtils.addTranscript(doc, transcript) } finally { doc.close() } } case None => } } }
raincole/ssscs
src/main/scala/ssscs/outputter/PdfOutputter.scala
Scala
mit
1,245
/* * Copyright 2019 http4s.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.http4s.ember.core.h2 import cats.data._ import cats.effect._ import cats.syntax.all._ import scodec.bits._ import scala.scalajs.js.JSConverters._ private[h2] trait HpackPlatform { def create[F[_]](implicit F: Async[F]): F[Hpack[F]] = F.delay { val compressor = facade.Compressor(facade.HpackOptions(4096)) val decompressor = facade.Decompressor(facade.HpackOptions(4096)) new Hpack[F] { def encodeHeaders(headers: NonEmptyList[(String, String, Boolean)]): F[ByteVector] = { val jsHeaders = headers .map { case (name, value, huffman) => facade.Header(name, value, huffman) } .toList .toJSArray F.delay(compressor.write(jsHeaders)) *> F.delay(ByteVector.view(compressor.read())) } def decodeHeaders(bv: ByteVector): F[NonEmptyList[(String, String)]] = F.delay(decompressor.write(bv.toUint8Array)) *> F.delay(decompressor.execute()) *> F.delay { val builder = List.newBuilder[(String, String)] while ( Option(decompressor.read()).map { header => builder += ((header.name, header.value)) }.isDefined ) {} builder.result() }.flatMap(NonEmptyList.fromList(_).liftTo[F](new NoSuchElementException)) } } }
rossabaker/http4s
ember-core/js/src/main/scala/org/http4s/ember/core/h2/HpackPlatform.scala
Scala
apache-2.0
1,947
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.plans

import org.scalactic.source
import org.scalatest.Suite
import org.scalatest.Tag

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf

/**
 * Provides helper methods for comparing plans.
 */
trait PlanTest extends SparkFunSuite with PlanTestBase

trait CodegenInterpretedPlanTest extends PlanTest {

  override protected def test(
      testName: String,
      testTags: Tag*)(testFun: => Any)(implicit pos: source.Position): Unit = {
    val codegenMode = CodegenObjectFactoryMode.CODEGEN_ONLY.toString
    val interpretedMode = CodegenObjectFactoryMode.NO_CODEGEN.toString

    withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> codegenMode) {
      super.test(testName + " (codegen path)", testTags: _*)(testFun)(pos)
    }
    withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> interpretedMode) {
      super.test(testName + " (interpreted path)", testTags: _*)(testFun)(pos)
    }
  }
}

/**
 * Provides helper methods for comparing plans, but without the overhead of
 * mandating a FunSuite.
 */
trait PlanTestBase extends PredicateHelper with SQLHelper { self: Suite =>

  // TODO(gatorsmile): remove this from PlanTest and all the analyzer rules
  protected def conf = SQLConf.get

  /**
   * Since attribute references are given globally unique ids during analysis,
   * we must normalize them to check if two different queries are identical.
   */
  protected def normalizeExprIds(plan: LogicalPlan) = {
    plan transformAllExpressions {
      case s: ScalarSubquery => s.copy(exprId = ExprId(0))
      case e: Exists => e.copy(exprId = ExprId(0))
      case l: ListQuery => l.copy(exprId = ExprId(0))
      case a: AttributeReference =>
        AttributeReference(a.name, a.dataType, a.nullable)(exprId = ExprId(0))
      case a: Alias => Alias(a.child, a.name)(exprId = ExprId(0))
      case ae: AggregateExpression => ae.copy(resultId = ExprId(0))
      case lv: NamedLambdaVariable => lv.copy(exprId = ExprId(0), value = null)
      case udf: PythonUDF => udf.copy(resultId = ExprId(0))
    }
  }

  private def rewriteNameFromAttrNullability(plan: LogicalPlan): LogicalPlan = {
    plan.transformAllExpressions {
      case a @ AttributeReference(name, _, false, _) =>
        a.copy(name = s"*$name")(exprId = a.exprId, qualifier = a.qualifier)
    }
  }

  /**
   * Normalizes plans:
   * - Filter the filter conditions that appear in a plan. For instance,
   *   ((expr 1 && expr 2) && expr 3), (expr 1 && expr 2 && expr 3), (expr 3 && (expr 1 && expr 2)
   *   etc., will all now be equivalent.
* - Sample the seed will replaced by 0L. * - Join conditions will be resorted by hashCode. */ protected def normalizePlan(plan: LogicalPlan): LogicalPlan = { plan transform { case Filter(condition: Expression, child: LogicalPlan) => Filter(splitConjunctivePredicates(condition).map(rewriteEqual).sortBy(_.hashCode()) .reduce(And), child) case sample: Sample => sample.copy(seed = 0L) case Join(left, right, joinType, condition, hint) if condition.isDefined => val newCondition = splitConjunctivePredicates(condition.get).map(rewriteEqual).sortBy(_.hashCode()) .reduce(And) Join(left, right, joinType, Some(newCondition), hint) } } /** * Rewrite [[EqualTo]] and [[EqualNullSafe]] operator to keep order. The following cases will be * equivalent: * 1. (a = b), (b = a); * 2. (a <=> b), (b <=> a). */ private def rewriteEqual(condition: Expression): Expression = condition match { case eq @ EqualTo(l: Expression, r: Expression) => Seq(l, r).sortBy(_.hashCode()).reduce(EqualTo) case eq @ EqualNullSafe(l: Expression, r: Expression) => Seq(l, r).sortBy(_.hashCode()).reduce(EqualNullSafe) case _ => condition // Don't reorder. } /** Fails the test if the two plans do not match */ protected def comparePlans( plan1: LogicalPlan, plan2: LogicalPlan, checkAnalysis: Boolean = true): Unit = { if (checkAnalysis) { // Make sure both plan pass checkAnalysis. SimpleAnalyzer.checkAnalysis(plan1) SimpleAnalyzer.checkAnalysis(plan2) } val normalized1 = normalizePlan(normalizeExprIds(plan1)) val normalized2 = normalizePlan(normalizeExprIds(plan2)) if (normalized1 != normalized2) { fail( s""" |== FAIL: Plans do not match === |${sideBySide( rewriteNameFromAttrNullability(normalized1).treeString, rewriteNameFromAttrNullability(normalized2).treeString).mkString("\\n")} """.stripMargin) } } /** Fails the test if the two expressions do not match */ protected def compareExpressions(e1: Expression, e2: Expression): Unit = { comparePlans(Filter(e1, OneRowRelation()), Filter(e2, OneRowRelation()), checkAnalysis = false) } /** Fails the test if the join order in the two plans do not match */ protected def compareJoinOrder(plan1: LogicalPlan, plan2: LogicalPlan): Unit = { val normalized1 = normalizePlan(normalizeExprIds(plan1)) val normalized2 = normalizePlan(normalizeExprIds(plan2)) if (!sameJoinPlan(normalized1, normalized2)) { fail( s""" |== FAIL: Plans do not match === |${sideBySide( rewriteNameFromAttrNullability(normalized1).treeString, rewriteNameFromAttrNullability(normalized2).treeString).mkString("\\n")} """.stripMargin) } } /** Consider symmetry for joins when comparing plans. */ private def sameJoinPlan(plan1: LogicalPlan, plan2: LogicalPlan): Boolean = { (plan1, plan2) match { case (j1: Join, j2: Join) => (sameJoinPlan(j1.left, j2.left) && sameJoinPlan(j1.right, j2.right) && j1.hint.leftHint == j2.hint.leftHint && j1.hint.rightHint == j2.hint.rightHint) || (sameJoinPlan(j1.left, j2.right) && sameJoinPlan(j1.right, j2.left) && j1.hint.leftHint == j2.hint.rightHint && j1.hint.rightHint == j2.hint.leftHint) case (p1: Project, p2: Project) => p1.projectList == p2.projectList && sameJoinPlan(p1.child, p2.child) case _ => plan1 == plan2 } } }
goldmedal/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
Scala
apache-2.0
7,441
/* Copyright 2009-2016 EPFL, Lausanne */ import leon.lang._ import leon.collection._ import leon._ object BVDivision { def divByZero(x: Int): Boolean = { (x / 0 == 10) } }
epfl-lara/leon
src/test/resources/regression/verification/newsolvers/invalid/BVDivision.scala
Scala
gpl-3.0
186
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.codegen.calls import org.apache.flink.table.api.ValidationException import org.apache.flink.table.data.binary.BinaryArrayData import org.apache.flink.table.data.util.{DataFormatConverters, MapDataUtil} import org.apache.flink.table.data.writer.{BinaryArrayWriter, BinaryRowWriter} import org.apache.flink.table.planner.codegen.CodeGenUtils.{binaryRowFieldSetAccess, binaryRowSetNull, binaryWriterWriteField, binaryWriterWriteNull, _} import org.apache.flink.table.planner.codegen.GenerateUtils._ import org.apache.flink.table.planner.codegen.GeneratedExpression.{ALWAYS_NULL, NEVER_NULL, NO_CODE} import org.apache.flink.table.planner.codegen.{CodeGenException, CodeGeneratorContext, GeneratedExpression} import org.apache.flink.table.planner.utils.JavaScalaConversionUtil.toScala import org.apache.flink.table.runtime.functions.SqlFunctionUtils import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromLogicalTypeToDataType import org.apache.flink.table.runtime.types.PlannerTypeUtils import org.apache.flink.table.runtime.types.PlannerTypeUtils.{isInteroperable, isPrimitive} import org.apache.flink.table.runtime.typeutils.TypeCheckUtils import org.apache.flink.table.runtime.typeutils.TypeCheckUtils._ import org.apache.flink.table.types.logical.LogicalTypeFamily.DATETIME import org.apache.flink.table.types.logical.LogicalTypeRoot._ import org.apache.flink.table.types.logical._ import org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast import org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldTypes import org.apache.flink.table.types.logical.utils.LogicalTypeMerging.findCommonType import org.apache.flink.util.Preconditions.checkArgument import org.apache.calcite.avatica.util.DateTimeUtils.MILLIS_PER_DAY import org.apache.calcite.avatica.util.{DateTimeUtils, TimeUnitRange} import org.apache.calcite.util.BuiltInMethod import java.lang.{StringBuilder => JStringBuilder} import java.nio.charset.StandardCharsets import java.util.Arrays.asList import scala.collection.JavaConversions._ /** * Utilities to generate SQL scalar operators, e.g. arithmetic operator, * compare operator, equal operator, etc. */ object ScalarOperatorGens { // ---------------------------------------------------------------------------------------- // scalar operators generate utils // ---------------------------------------------------------------------------------------- /** * Generates a binary arithmetic operator, e.g. 
+ - * / % */ def generateBinaryArithmeticOperator( ctx: CodeGeneratorContext, operator: String, resultType: LogicalType, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { resultType match { case dt: DecimalType => return generateDecimalBinaryArithmeticOperator(ctx, operator, dt, left, right) case _ => } val leftCasting = operator match { case "%" => if (isInteroperable(left.resultType, right.resultType)) { numericCasting(left.resultType, resultType) } else { val castedType = if (isDecimal(left.resultType)) { new BigIntType() } else { left.resultType } numericCasting(left.resultType, castedType) } case _ => numericCasting(left.resultType, resultType) } val rightCasting = numericCasting(right.resultType, resultType) val resultTypeTerm = primitiveTypeTermForType(resultType) generateOperatorIfNotNull(ctx, resultType, left, right) { (leftTerm, rightTerm) => s"($resultTypeTerm) (${leftCasting(leftTerm)} $operator ${rightCasting(rightTerm)})" } } /** * Generates a binary arithmetic operator for Decimal, e.g. + - * / % */ private def generateDecimalBinaryArithmeticOperator( ctx: CodeGeneratorContext, operator: String, resultType: DecimalType, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { // do not cast a decimal operand to resultType, which may change its value. // use it as is during calculation. def castToDec(t: LogicalType): String => String = t match { case _: DecimalType => (operandTerm: String) => s"$operandTerm" case _ => numericCasting(t, resultType) } val methods = Map( "+" -> "add", "-" -> "subtract", "*" -> "multiply", "/" -> "divide", "%" -> "mod") generateOperatorIfNotNull(ctx, resultType, left, right) { (leftTerm, rightTerm) => { val method = methods(operator) val leftCasted = castToDec(left.resultType)(leftTerm) val rightCasted = castToDec(right.resultType)(rightTerm) val precision = resultType.getPrecision val scale = resultType.getScale s"$DECIMAL_UTIL.$method($leftCasted, $rightCasted, $precision, $scale)" } } } /** * Generates an unary arithmetic operator, e.g. 
-num */ def generateUnaryArithmeticOperator( ctx: CodeGeneratorContext, operator: String, resultType: LogicalType, operand: GeneratedExpression) : GeneratedExpression = { generateUnaryOperatorIfNotNull(ctx, resultType, operand) { operandTerm => if (isDecimal(operand.resultType) && operator == "-") { s"$DECIMAL_UTIL.negate($operandTerm)" } else if (isDecimal(operand.resultType) && operator == "+") { s"$operandTerm" } else { s"$operator($operandTerm)" } } } def generateTemporalPlusMinus( ctx: CodeGeneratorContext, plus: Boolean, resultType: LogicalType, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { val op = if (plus) "+" else "-" (left.resultType.getTypeRoot, right.resultType.getTypeRoot) match { // arithmetic of time point and time interval case (INTERVAL_YEAR_MONTH, INTERVAL_YEAR_MONTH) | (INTERVAL_DAY_TIME, INTERVAL_DAY_TIME) => generateBinaryArithmeticOperator(ctx, op, left.resultType, left, right) case (DATE, INTERVAL_DAY_TIME) => resultType.getTypeRoot match { case DATE => generateOperatorIfNotNull(ctx, new DateType(), left, right) { (l, r) => s"$l $op (java.lang.Math.toIntExact($r / ${MILLIS_PER_DAY}L))" } case TIMESTAMP_WITHOUT_TIME_ZONE => generateOperatorIfNotNull(ctx, resultType, left, right) { (l, r) => s"$TIMESTAMP_DATA.fromEpochMillis(($l * ${MILLIS_PER_DAY}L) $op $r)" } } case (DATE, INTERVAL_YEAR_MONTH) => generateOperatorIfNotNull(ctx, new DateType(), left, right) { (l, r) => s"${qualifyMethod(BuiltInMethods.ADD_MONTHS)}($l, $op($r))" } case (TIME_WITHOUT_TIME_ZONE, INTERVAL_DAY_TIME) => generateOperatorIfNotNull(ctx, new TimeType(), left, right) { (l, r) => s"java.lang.Math.toIntExact((($l + ${MILLIS_PER_DAY}L) $op (" + s"java.lang.Math.toIntExact($r % ${MILLIS_PER_DAY}L))) % ${MILLIS_PER_DAY}L)" } case (TIME_WITHOUT_TIME_ZONE, INTERVAL_YEAR_MONTH) => generateOperatorIfNotNull(ctx, new TimeType(), left, right) { (l, r) => s"$l" } case (TIMESTAMP_WITHOUT_TIME_ZONE | TIMESTAMP_WITH_LOCAL_TIME_ZONE, INTERVAL_DAY_TIME) => generateOperatorIfNotNull(ctx, left.resultType, left, right) { (l, r) => { val leftTerm = s"$l.getMillisecond()" val nanoTerm = s"$l.getNanoOfMillisecond()" s"$TIMESTAMP_DATA.fromEpochMillis($leftTerm $op $r, $nanoTerm)" } } case (TIMESTAMP_WITHOUT_TIME_ZONE | TIMESTAMP_WITH_LOCAL_TIME_ZONE, INTERVAL_YEAR_MONTH) => generateOperatorIfNotNull(ctx, left.resultType, left, right) { (l, r) => { val leftTerm = s"$l.getMillisecond()" val nanoTerm = s"$l.getNanoOfMillisecond()" s""" |$TIMESTAMP_DATA.fromEpochMillis( | ${qualifyMethod(BuiltInMethods.ADD_MONTHS)}($leftTerm, $op($r)), | $nanoTerm) """.stripMargin } } // minus arithmetic of time points (i.e. 
for TIMESTAMPDIFF) case (TIMESTAMP_WITHOUT_TIME_ZONE | TIME_WITHOUT_TIME_ZONE | DATE, TIMESTAMP_WITHOUT_TIME_ZONE | TIME_WITHOUT_TIME_ZONE | DATE) if !plus => resultType.getTypeRoot match { case INTERVAL_YEAR_MONTH => generateOperatorIfNotNull(ctx, resultType, left, right) { (ll, rr) => (left.resultType.getTypeRoot, right.resultType.getTypeRoot) match { case (TIMESTAMP_WITHOUT_TIME_ZONE, DATE) => val leftTerm = s"$ll.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}" + s"($leftTerm, $rr * ${MILLIS_PER_DAY}L)" case (DATE, TIMESTAMP_WITHOUT_TIME_ZONE) => val rightTerm = s"$rr.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}" + s"($ll * ${MILLIS_PER_DAY}L, $rightTerm)" case (TIMESTAMP_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => val leftTerm = s"$ll.getMillisecond()" val rightTerm = s"$rr.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}($leftTerm, $rightTerm)" case (TIMESTAMP_WITHOUT_TIME_ZONE, TIME_WITHOUT_TIME_ZONE) => val leftTerm = s"$ll.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}($leftTerm, $rr)" case (TIME_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => val rightTerm = s"$rr.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}($ll, $rightTerm)" case _ => s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}($ll, $rr)" } } case INTERVAL_DAY_TIME => generateOperatorIfNotNull(ctx, resultType, left, right) { (ll, rr) => (left.resultType.getTypeRoot, right.resultType.getTypeRoot) match { case (TIMESTAMP_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => val leftTerm = s"$ll.getMillisecond()" val rightTerm = s"$rr.getMillisecond()" s"$leftTerm $op $rightTerm" case (DATE, DATE) => s"($ll * ${MILLIS_PER_DAY}L) $op ($rr * ${MILLIS_PER_DAY}L)" case (TIMESTAMP_WITHOUT_TIME_ZONE, DATE) => val leftTerm = s"$ll.getMillisecond()" s"$leftTerm $op ($rr * ${MILLIS_PER_DAY}L)" case (DATE, TIMESTAMP_WITHOUT_TIME_ZONE) => val rightTerm = s"$rr.getMillisecond()" s"($ll * ${MILLIS_PER_DAY}L) $op $rightTerm" } } } // minus arithmetic of time points (i.e. 
for TIMESTAMPDIFF for TIMESTAMP_LTZ) case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, t) if t.getFamilies.contains(DATETIME) && !plus => generateTimestampLtzMinus(ctx, resultType, left, right) case (t, TIMESTAMP_WITH_LOCAL_TIME_ZONE) if t.getFamilies.contains(DATETIME) && !plus => generateTimestampLtzMinus(ctx, resultType, left, right) case _ => throw new CodeGenException("Unsupported temporal arithmetic.") } } private def generateTimestampLtzMinus( ctx: CodeGeneratorContext, resultType: LogicalType, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { resultType.getTypeRoot match { case INTERVAL_YEAR_MONTH => generateOperatorIfNotNull(ctx, resultType, left, right) { (ll, rr) => (left.resultType.getTypeRoot, right.resultType.getTypeRoot) match { case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => val leftTerm = s"$ll.getMillisecond()" val rightTerm = s"$rr.getMillisecond()" s"${qualifyMethod(BuiltInMethods.SUBTRACT_MONTHS)}($leftTerm, $rightTerm)" case _ => throw new CodeGenException( "TIMESTAMP_LTZ only supports diff between the same type.") } } case INTERVAL_DAY_TIME => generateOperatorIfNotNull(ctx, resultType, left, right) { (ll, rr) => (left.resultType.getTypeRoot, right.resultType.getTypeRoot) match { case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => val leftTerm = s"$ll.getMillisecond()" val rightTerm = s"$rr.getMillisecond()" s"$leftTerm - $rightTerm" case _ => throw new CodeGenException( "TIMESTAMP_LTZ only supports diff between the same type.") } } case _ => throw new CodeGenException("Unsupported temporal arithmetic.") } } def generateUnaryIntervalPlusMinus( ctx: CodeGeneratorContext, plus: Boolean, operand: GeneratedExpression) : GeneratedExpression = { val operator = if (plus) "+" else "-" generateUnaryArithmeticOperator(ctx, operator, operand.resultType, operand) } // ---------------------------------------------------------------------------------------- // scalar expression generate utils // ---------------------------------------------------------------------------------------- /** * Generates IN expression using a HashSet */ def generateIn( ctx: CodeGeneratorContext, needle: GeneratedExpression, haystack: Seq[GeneratedExpression]) : GeneratedExpression = { // add elements to hash set if they are constant if (haystack.forall(_.literal)) { // determine common numeric type val widerType = toScala(findCommonType(asList(needle.resultType, haystack.head.resultType))) .orElse(throw new CodeGenException(s"Unable to find common type of $needle and $haystack.")) // we need to normalize the values for the hash set val castNumeric = widerType match { case Some(t) => (value: GeneratedExpression) => numericCasting(value.resultType, t)(value.resultTerm) case None => (value: GeneratedExpression) => value.resultTerm } val resultType = widerType match { case Some(t) => t case None => needle.resultType } val elements = haystack.map { element => element.copy( castNumeric(element), // cast element to wider type element.nullTerm, element.code, resultType) } val setTerm = ctx.addReusableHashSet(elements, resultType) val castedNeedle = needle.copy( castNumeric(needle), // cast needle to wider type needle.nullTerm, needle.code, resultType) val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val resultTypeTerm = primitiveTypeTermForType(new BooleanType()) val defaultValue = primitiveDefaultValue(new BooleanType()) val operatorCode = if (ctx.nullCheck) { s""" |${castedNeedle.code} |$resultTypeTerm $resultTerm = $defaultValue; 
|boolean $nullTerm = true; |if (!${castedNeedle.nullTerm}) { | $resultTerm = $setTerm.contains(${castedNeedle.resultTerm}); | $nullTerm = !$resultTerm && $setTerm.containsNull(); |} |""".stripMargin.trim } else { s""" |${castedNeedle.code} |$resultTypeTerm $resultTerm = $setTerm.contains(${castedNeedle.resultTerm}); |""".stripMargin.trim } GeneratedExpression(resultTerm, nullTerm, operatorCode, new BooleanType()) } else { // we use a chain of ORs for a set that contains non-constant elements haystack .map(generateEquals(ctx, needle, _)) .reduce((left, right) => generateOr(ctx, left, right) ) } } def generateEquals( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { val canEqual = isInteroperable(left.resultType, right.resultType) if (isCharacterString(left.resultType) && isCharacterString(right.resultType)) { generateOperatorIfNotNull(ctx, new BooleanType(), left, right) { (leftTerm, rightTerm) => s"$leftTerm.equals($rightTerm)" } } // numeric types else if (isNumeric(left.resultType) && isNumeric(right.resultType)) { generateComparison(ctx, "==", left, right) } // array types else if (isArray(left.resultType) && canEqual) { generateArrayComparison(ctx, left, right) } // map types else if (isMap(left.resultType) && canEqual) { val mapType = left.resultType.asInstanceOf[MapType] generateMapComparison(ctx, left, right, mapType.getKeyType, mapType.getValueType) } // multiset types else if (isMultiset(left.resultType) && canEqual) { val multisetType = left.resultType.asInstanceOf[MultisetType] generateMapComparison(ctx, left, right, multisetType.getElementType, new IntType(false)) } // comparable types of same type else if (isComparable(left.resultType) && canEqual) { generateComparison(ctx, "==", left, right) } // generic types of same type else if (isRaw(left.resultType) && canEqual) { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val genericSer = ctx.addReusableTypeSerializer(left.resultType) val ser = s"$genericSer.getInnerSerializer()" val resultType = new BooleanType() val code = s""" |${left.code} |${right.code} |boolean $nullTerm = ${left.nullTerm} || ${right.nullTerm}; |boolean $resultTerm = ${primitiveDefaultValue(resultType)}; |if (!$nullTerm) { | ${left.resultTerm}.ensureMaterialized($ser); | ${right.resultTerm}.ensureMaterialized($ser); | $resultTerm = | ${left.resultTerm}.getBinarySection().equals(${right.resultTerm}.getBinarySection()); |} |""".stripMargin GeneratedExpression(resultTerm, nullTerm, code, resultType) } // support date/time/timestamp equalTo string. // for performance, we cast literal string to literal time. 
else if (isTimePoint(left.resultType) && isCharacterString(right.resultType)) { if (right.literal) { generateEquals(ctx, left, generateCastStringLiteralToDateTime(ctx, right, left.resultType)) } else { generateEquals(ctx, left, generateCast(ctx, right, left.resultType)) } } else if (isTimePoint(right.resultType) && isCharacterString(left.resultType)) { if (left.literal) { generateEquals( ctx, generateCastStringLiteralToDateTime(ctx, left, right.resultType), right) } else { generateEquals(ctx, generateCast(ctx, left, right.resultType), right) } } // non comparable types else { generateOperatorIfNotNull(ctx, new BooleanType(), left, right) { if (isReference(left.resultType)) { (leftTerm, rightTerm) => s"$leftTerm.equals($rightTerm)" } else if (isReference(right.resultType)) { (leftTerm, rightTerm) => s"$rightTerm.equals($leftTerm)" } else { throw new CodeGenException(s"Incomparable types: ${left.resultType} and " + s"${right.resultType}") } } } } def generateIsNotDistinctFrom( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { generateOr(ctx, generateAnd(ctx, generateIsNull(ctx, left), generateIsNull(ctx, right)), generateEquals(ctx, left, right)) } def generateNotEquals( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { if (isCharacterString(left.resultType) && isCharacterString(right.resultType)) { generateOperatorIfNotNull(ctx, new BooleanType(), left, right) { (leftTerm, rightTerm) => s"!$leftTerm.equals($rightTerm)" } } // numeric types else if (isNumeric(left.resultType) && isNumeric(right.resultType)) { generateComparison(ctx, "!=", left, right) } // temporal types else if (isTemporal(left.resultType) && isInteroperable(left.resultType, right.resultType)) { generateComparison(ctx, "!=", left, right) } // array types else if (isArray(left.resultType) && isInteroperable(left.resultType, right.resultType)) { val equalsExpr = generateEquals(ctx, left, right) GeneratedExpression( s"(!${equalsExpr.resultTerm})", equalsExpr.nullTerm, equalsExpr.code, new BooleanType()) } // map types else if (isMap(left.resultType) && isInteroperable(left.resultType, right.resultType)) { val equalsExpr = generateEquals(ctx, left, right) GeneratedExpression( s"(!${equalsExpr.resultTerm})", equalsExpr.nullTerm, equalsExpr.code, new BooleanType()) } // comparable types else if (isComparable(left.resultType) && isInteroperable(left.resultType, right.resultType)) { generateComparison(ctx, "!=", left, right) } // non-comparable types else { generateOperatorIfNotNull(ctx, new BooleanType(), left, right) { if (isReference(left.resultType)) { (leftTerm, rightTerm) => s"!($leftTerm.equals($rightTerm))" } else if (isReference(right.resultType)) { (leftTerm, rightTerm) => s"!($rightTerm.equals($leftTerm))" } else { throw new CodeGenException(s"Incomparable types: ${left.resultType} and " + s"${right.resultType}") } } } } /** * Generates comparison code for numeric types and comparable types of same type. 
*/ def generateComparison( ctx: CodeGeneratorContext, operator: String, left: GeneratedExpression, right: GeneratedExpression) : GeneratedExpression = { generateOperatorIfNotNull(ctx, new BooleanType(), left, right) { // either side is decimal if (isDecimal(left.resultType) || isDecimal(right.resultType)) { (leftTerm, rightTerm) => { s"$DECIMAL_UTIL.compare($leftTerm, $rightTerm) $operator 0" } } // both sides are numeric else if (isNumeric(left.resultType) && isNumeric(right.resultType)) { (leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm" } // both sides are timestamp else if (isTimestamp(left.resultType) && isTimestamp(right.resultType)) { (leftTerm, rightTerm) => s"$leftTerm.compareTo($rightTerm) $operator 0" } // both sides are timestamp with local zone else if (isTimestampWithLocalZone(left.resultType) && isTimestampWithLocalZone(right.resultType)) { (leftTerm, rightTerm) => s"$leftTerm.compareTo($rightTerm) $operator 0" } // both sides are temporal of same type else if (isTemporal(left.resultType) && isInteroperable(left.resultType, right.resultType)) { (leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm" } // both sides are boolean else if (isBoolean(left.resultType) && isInteroperable(left.resultType, right.resultType)) { operator match { case "==" | "!=" => (leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm" case ">" | "<" | "<=" | ">=" => (leftTerm, rightTerm) => s"java.lang.Boolean.compare($leftTerm, $rightTerm) $operator 0" case _ => throw new CodeGenException(s"Unsupported boolean comparison '$operator'.") } } // both sides are binary type else if (isBinaryString(left.resultType) && isInteroperable(left.resultType, right.resultType)) { val utilName = classOf[SqlFunctionUtils].getCanonicalName (leftTerm, rightTerm) => s"$utilName.byteArrayCompare($leftTerm, $rightTerm) $operator 0" } // both sides are same comparable type else if (isComparable(left.resultType) && isInteroperable(left.resultType, right.resultType)) { (leftTerm, rightTerm) => s"(($leftTerm == null) ? (($rightTerm == null) ? 0 : -1) : (($rightTerm == null) ? 
" + s"1 : ($leftTerm.compareTo($rightTerm)))) $operator 0" } else { throw new CodeGenException(s"Incomparable types: ${left.resultType} and " + s"${right.resultType}") } } } def generateIsNull( ctx: CodeGeneratorContext, operand: GeneratedExpression): GeneratedExpression = { if (ctx.nullCheck) { GeneratedExpression(operand.nullTerm, NEVER_NULL, operand.code, new BooleanType(false)) } else if (!ctx.nullCheck && isReference(operand.resultType)) { val resultTerm = newName("isNull") val operatorCode = s""" |${operand.code} |boolean $resultTerm = ${operand.resultTerm} == null; |""".stripMargin GeneratedExpression(resultTerm, NEVER_NULL, operatorCode, new BooleanType(false)) } else { GeneratedExpression("false", NEVER_NULL, operand.code, new BooleanType(false)) } } def generateIsNotNull( ctx: CodeGeneratorContext, operand: GeneratedExpression): GeneratedExpression = { if (ctx.nullCheck) { val resultTerm = newName("result") val operatorCode = s""" |${operand.code} |boolean $resultTerm = !${operand.nullTerm}; |""".stripMargin.trim GeneratedExpression(resultTerm, NEVER_NULL, operatorCode, new BooleanType(false)) } else if (!ctx.nullCheck && isReference(operand.resultType)) { val resultTerm = newName("result") val operatorCode = s""" |${operand.code} |boolean $resultTerm = ${operand.resultTerm} != null; |""".stripMargin.trim GeneratedExpression(resultTerm, NEVER_NULL, operatorCode, new BooleanType(false)) } else { GeneratedExpression("true", NEVER_NULL, operand.code, new BooleanType(false)) } } def generateAnd( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression): GeneratedExpression = { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val operatorCode = if (ctx.nullCheck) { // Three-valued logic: // no Unknown -> Two-valued logic // True && Unknown -> Unknown // False && Unknown -> False // Unknown && True -> Unknown // Unknown && False -> False // Unknown && Unknown -> Unknown s""" |${left.code} | |boolean $resultTerm = false; |boolean $nullTerm = false; |if (!${left.nullTerm} && !${left.resultTerm}) { | // left expr is false, skip right expr |} else { | ${right.code} | | if (!${left.nullTerm} && !${right.nullTerm}) { | $resultTerm = ${left.resultTerm} && ${right.resultTerm}; | $nullTerm = false; | } | else if (!${left.nullTerm} && ${left.resultTerm} && ${right.nullTerm}) { | $resultTerm = false; | $nullTerm = true; | } | else if (!${left.nullTerm} && !${left.resultTerm} && ${right.nullTerm}) { | $resultTerm = false; | $nullTerm = false; | } | else if (${left.nullTerm} && !${right.nullTerm} && ${right.resultTerm}) { | $resultTerm = false; | $nullTerm = true; | } | else if (${left.nullTerm} && !${right.nullTerm} && !${right.resultTerm}) { | $resultTerm = false; | $nullTerm = false; | } | else { | $resultTerm = false; | $nullTerm = true; | } |} """.stripMargin.trim } else { s""" |${left.code} |boolean $resultTerm = false; |if (${left.resultTerm}) { | ${right.code} | $resultTerm = ${right.resultTerm}; |} |""".stripMargin.trim } GeneratedExpression(resultTerm, nullTerm, operatorCode, new BooleanType()) } def generateOr( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression): GeneratedExpression = { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val operatorCode = if (ctx.nullCheck) { // Three-valued logic: // no Unknown -> Two-valued logic // True || Unknown -> True // False || Unknown -> Unknown // Unknown || True -> True // Unknown || False -> Unknown // Unknown || Unknown -> Unknown s""" |${left.code} | |boolean 
$resultTerm = true; |boolean $nullTerm = false; |if (!${left.nullTerm} && ${left.resultTerm}) { | // left expr is true, skip right expr |} else { | ${right.code} | | if (!${left.nullTerm} && !${right.nullTerm}) { | $resultTerm = ${left.resultTerm} || ${right.resultTerm}; | $nullTerm = false; | } | else if (!${left.nullTerm} && ${left.resultTerm} && ${right.nullTerm}) { | $resultTerm = true; | $nullTerm = false; | } | else if (!${left.nullTerm} && !${left.resultTerm} && ${right.nullTerm}) { | $resultTerm = false; | $nullTerm = true; | } | else if (${left.nullTerm} && !${right.nullTerm} && ${right.resultTerm}) { | $resultTerm = true; | $nullTerm = false; | } | else if (${left.nullTerm} && !${right.nullTerm} && !${right.resultTerm}) { | $resultTerm = false; | $nullTerm = true; | } | else { | $resultTerm = false; | $nullTerm = true; | } |} |""".stripMargin.trim } else { s""" |${left.code} |boolean $resultTerm = true; |if (!${left.resultTerm}) { | ${right.code} | $resultTerm = ${right.resultTerm}; |} |""".stripMargin.trim } GeneratedExpression(resultTerm, nullTerm, operatorCode, new BooleanType()) } def generateNot( ctx: CodeGeneratorContext, operand: GeneratedExpression) : GeneratedExpression = { // Three-valued logic: // no Unknown -> Two-valued logic // Unknown -> Unknown generateUnaryOperatorIfNotNull(ctx, new BooleanType(), operand) { operandTerm => s"!($operandTerm)" } } def generateIsTrue(operand: GeneratedExpression): GeneratedExpression = { GeneratedExpression( operand.resultTerm, // unknown is always false by default GeneratedExpression.NEVER_NULL, operand.code, new BooleanType()) } def generateIsNotTrue(operand: GeneratedExpression): GeneratedExpression = { GeneratedExpression( s"(!${operand.resultTerm})", // unknown is always false by default GeneratedExpression.NEVER_NULL, operand.code, new BooleanType()) } def generateIsFalse(operand: GeneratedExpression): GeneratedExpression = { GeneratedExpression( s"(!${operand.resultTerm} && !${operand.nullTerm})", GeneratedExpression.NEVER_NULL, operand.code, new BooleanType()) } def generateIsNotFalse(operand: GeneratedExpression): GeneratedExpression = { GeneratedExpression( s"(${operand.resultTerm} || ${operand.nullTerm})", GeneratedExpression.NEVER_NULL, operand.code, new BooleanType()) } def generateReinterpret( ctx: CodeGeneratorContext, operand: GeneratedExpression, targetType: LogicalType) : GeneratedExpression = (operand.resultType.getTypeRoot, targetType.getTypeRoot) match { case (_, _) if isInteroperable(operand.resultType, targetType) => operand.copy(resultType = targetType) // internal reinterpretation of temporal types // Date -> Integer // Time -> Integer // Timestamp -> Long // Integer -> Date // Integer -> Time // Long -> Timestamp // Integer -> Interval Months // Long -> Interval Millis // Interval Months -> Integer // Interval Millis -> Long // Date -> Long // Time -> Long // Interval Months -> Long case (DATE, INTEGER) | (TIME_WITHOUT_TIME_ZONE, INTEGER) | (INTEGER, DATE) | (INTEGER, TIME_WITHOUT_TIME_ZONE) | (INTEGER, INTERVAL_YEAR_MONTH) | (BIGINT, INTERVAL_DAY_TIME) | (INTERVAL_YEAR_MONTH, INTEGER) | (INTERVAL_DAY_TIME, BIGINT) | (DATE, BIGINT) | (TIME_WITHOUT_TIME_ZONE, BIGINT) | (INTERVAL_YEAR_MONTH, BIGINT) => internalExprCasting(operand, targetType) case (TIMESTAMP_WITHOUT_TIME_ZONE, BIGINT) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$operandTerm.getMillisecond()" } case (BIGINT, TIMESTAMP_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm 
=> s"$TIMESTAMP_DATA.fromEpochMillis($operandTerm)" } case (from, to) => if (from == to) { operand } else { throw new CodeGenException(s"Unsupported reinterpret from '$from' to '$to'.") } } def generateCast( ctx: CodeGeneratorContext, operand: GeneratedExpression, targetType: LogicalType) : GeneratedExpression = (operand.resultType.getTypeRoot, targetType.getTypeRoot) match { // special case: cast from TimeIndicatorTypeInfo to SqlTimeTypeInfo case (TIMESTAMP_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) if operand.resultType.asInstanceOf[TimestampType].getKind == TimestampKind.PROCTIME || operand.resultType.asInstanceOf[TimestampType].getKind == TimestampKind.ROWTIME || targetType.asInstanceOf[TimestampType].getKind == TimestampKind.PROCTIME || targetType.asInstanceOf[TimestampType].getKind == TimestampKind.ROWTIME => // just replace the DataType operand.copy(resultType = new TimestampType(operand.resultType.isNullable, 3)) case (TIMESTAMP_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => val fromType = operand.resultType.asInstanceOf[TimestampType] val toType = targetType.asInstanceOf[TimestampType] if (fromType.getPrecision <= toType.getPrecision) { operand.copy(resultType = targetType) } else { val method = qualifyMethod(BuiltInMethods.TRUNCATE_SQL_TIMESTAMP) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$method($operandTerm, ${toType.getPrecision})" } } case (TIMESTAMP_WITHOUT_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => val fromType = operand.resultType.asInstanceOf[TimestampType] val toType = targetType.asInstanceOf[LocalZonedTimestampType] val method = qualifyMethod(BuiltInMethods.TIMESTAMP_TO_TIMESTAMP_WITH_LOCAL_ZONE) if (fromType.getPrecision < toType.getPrecision) { generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val timeZone = ctx.addReusableSessionTimeZone() s"$method($operandTerm, $timeZone)" } } else { val truncate_method = qualifyMethod(BuiltInMethods.TRUNCATE_SQL_TIMESTAMP) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val timeZone = ctx.addReusableSessionTimeZone() s"$truncate_method($method($operandTerm, $timeZone), ${toType.getPrecision})" } } case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => val fromType = operand.resultType.asInstanceOf[LocalZonedTimestampType] val toType = targetType.asInstanceOf[TimestampType] val method = qualifyMethod(BuiltInMethods.TIMESTAMP_WITH_LOCAL_ZONE_TO_TIMESTAMP) if (fromType.getPrecision < toType.getPrecision) { generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() s"$method($operandTerm, $zone)" } } else { val truncate_method = qualifyMethod(BuiltInMethods.TRUNCATE_SQL_TIMESTAMP) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() s"$truncate_method($method($operandTerm, $zone), ${toType.getPrecision})" } } case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => val fromType = operand.resultType.asInstanceOf[LocalZonedTimestampType] val toType = targetType.asInstanceOf[LocalZonedTimestampType] if (fromType.getPrecision <= toType.getPrecision) { operand.copy(resultType = targetType) } else { val method = qualifyMethod(BuiltInMethods.TRUNCATE_SQL_TIMESTAMP) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$method($operandTerm, ${toType.getPrecision})" } } // identity casting case (_, _) if isInteroperable(operand.resultType, targetType) => 
operand.copy(resultType = targetType) // Date/Time/Timestamp -> String case (_, VARCHAR | CHAR) if TypeCheckUtils.isTimePoint(operand.resultType) => generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { operandTerm => s"${localTimeToStringCode(ctx, operand.resultType, operandTerm.head)}" } // Interval Months -> String case (INTERVAL_YEAR_MONTH, VARCHAR | CHAR) => val method = qualifyMethod(BuiltInMethod.INTERVAL_YEAR_MONTH_TO_STRING.method) val timeUnitRange = qualifyEnum(TimeUnitRange.YEAR_TO_MONTH) generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { terms => s"$method(${terms.head}, $timeUnitRange)" } // Interval Millis -> String case (INTERVAL_DAY_TIME, VARCHAR | CHAR) => val method = qualifyMethod(BuiltInMethod.INTERVAL_DAY_TIME_TO_STRING.method) val timeUnitRange = qualifyEnum(TimeUnitRange.DAY_TO_SECOND) generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { terms => s"$method(${terms.head}, $timeUnitRange, 3)" // milli second precision } // Array -> String case (ARRAY, VARCHAR | CHAR) => generateCastArrayToString( ctx, operand, operand.resultType.asInstanceOf[ArrayType], targetType) // Byte array -> String UTF-8 case (BINARY | VARBINARY, VARCHAR | CHAR) => val charset = classOf[StandardCharsets].getCanonicalName generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { terms => s"(new String(${terms.head}, $charset.UTF_8))" } // Map -> String case (MAP, VARCHAR | CHAR) => generateCastMapToString( ctx, operand, operand.resultType.asInstanceOf[MapType], targetType) // composite type -> String case (ROW, VARCHAR | CHAR) => generateCastRowDataToString( ctx, operand, operand.resultType.asInstanceOf[RowType], targetType) case (RAW, VARCHAR | CHAR) => generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { terms => val converter = DataFormatConverters.getConverterForDataType( fromLogicalTypeToDataType(operand.resultType)) val converterTerm = ctx.addReusableObject(converter, "converter") s""" "" + $converterTerm.toExternal(${terms.head})""" } // * (not Date/Time/Timestamp) -> String // TODO: GenericType with Date/Time/Timestamp -> String would call toString implicitly case (_, VARCHAR | CHAR) => generateStringResultCallIfArgsNotNull(ctx, Seq(operand), targetType) { terms => s""" "" + ${terms.head}""" } // String -> Boolean case (VARCHAR | CHAR, BOOLEAN) => generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => s"$BINARY_STRING_UTIL.toBooleanSQL($operandTerm)" } // String -> NUMERIC TYPE (not Character) case (VARCHAR | CHAR, _) if TypeCheckUtils.isNumeric(targetType) => targetType match { case dt: DecimalType => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$BINARY_STRING_UTIL.toDecimal($operandTerm, ${dt.getPrecision}, ${dt.getScale})" } case _ => val methodName = targetType.getTypeRoot match { case TINYINT => "toByte" case SMALLINT => "toShort" case INTEGER => "toInt" case BIGINT => "toLong" case DOUBLE => "toDouble" case FLOAT => "toFloat" case _ => null } assert(methodName != null, "Unexpected data type.") generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => s"($BINARY_STRING_UTIL.$methodName($operandTerm.trim()))" } } // String -> Date case (VARCHAR | CHAR, DATE) => generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => s"${qualifyMethod(BuiltInMethods.STRING_TO_DATE)}($operandTerm.toString())" } // String -> Time case (VARCHAR | CHAR, 
TIME_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => s"${qualifyMethod(BuiltInMethods.STRING_TO_TIME)}($operandTerm.toString())" } // String -> Timestamp case (VARCHAR | CHAR, TIMESTAMP_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => s""" |${qualifyMethod(BuiltInMethods.STRING_TO_TIMESTAMP)}($operandTerm.toString()) """.stripMargin } case (VARCHAR | CHAR, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => generateUnaryOperatorIfNotNull( ctx, targetType, operand, resultNullable = true) { operandTerm => val zone = ctx.addReusableSessionTimeZone() val method = qualifyMethod(BuiltInMethods.STRING_TO_TIMESTAMP_TIME_ZONE) s"$TIMESTAMP_DATA.fromEpochMillis($method($operandTerm.toString(), $zone))" } // String -> binary case (VARCHAR | CHAR, VARBINARY | BINARY) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$operandTerm.toBytes()" } // Note: SQL2003 $6.12 - casting is not allowed between boolean and numeric types. // Calcite does not allow it either. // Boolean -> DECIMAL case (BOOLEAN, DECIMAL) => val dt = targetType.asInstanceOf[DecimalType] generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$DECIMAL_UTIL.castFrom($operandTerm, ${dt.getPrecision}, ${dt.getScale})" } // Boolean -> NUMERIC TYPE case (BOOLEAN, _) if TypeCheckUtils.isNumeric(targetType) => val targetTypeTerm = primitiveTypeTermForType(targetType) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"($targetTypeTerm) ($operandTerm ? 1 : 0)" } // DECIMAL -> Boolean case (DECIMAL, BOOLEAN) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$DECIMAL_UTIL.castToBoolean($operandTerm)" } // NUMERIC TYPE -> Boolean case (_, BOOLEAN) if isNumeric(operand.resultType) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$operandTerm != 0" } // between NUMERIC TYPE | Decimal case (_, _) if isNumeric(operand.resultType) && isNumeric(targetType) => val operandCasting = numericCasting(operand.resultType, targetType) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"${operandCasting(operandTerm)}" } // Date -> Timestamp case (DATE, TIMESTAMP_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s""" |$TIMESTAMP_DATA.fromEpochMillis( | $operandTerm * ${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY) """.stripMargin } // Timestamp -> Date case (TIMESTAMP_WITHOUT_TIME_ZONE, DATE) => val targetTypeTerm = primitiveTypeTermForType(targetType) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s""" |($targetTypeTerm) ($operandTerm.getMillisecond() / | ${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY) """.stripMargin } // Time -> Timestamp case (TIME_WITHOUT_TIME_ZONE, TIMESTAMP_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"$TIMESTAMP_DATA.fromEpochMillis($operandTerm)" } // Timestamp -> Time case (TIMESTAMP_WITHOUT_TIME_ZONE, TIME_WITHOUT_TIME_ZONE) => val targetTypeTerm = primitiveTypeTermForType(targetType) generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => s"($targetTypeTerm) ($operandTerm.getMillisecond() % " + s"${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY)" } // Date -> Timestamp with local time zone case (DATE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, 
operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() val method = qualifyMethod(BuiltInMethods.DATE_TO_TIMESTAMP_WITH_LOCAL_TIME_ZONE) s"$TIMESTAMP_DATA.fromEpochMillis($method($operandTerm, $zone))" } // Timestamp with local time zone -> Date case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, DATE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() val method = qualifyMethod(BuiltInMethods.TIMESTAMP_WITH_LOCAL_TIME_ZONE_TO_DATE) s"$method($operandTerm.getMillisecond(), $zone)" } // Time -> Timestamp with local time zone case (TIME_WITHOUT_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() val method = qualifyMethod(BuiltInMethods.TIME_TO_TIMESTAMP_WITH_LOCAL_TIME_ZONE) s"$TIMESTAMP_DATA.fromEpochMillis($method($operandTerm, $zone))" } // Timestamp with local time zone -> Time case (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIME_WITHOUT_TIME_ZONE) => generateUnaryOperatorIfNotNull(ctx, targetType, operand) { operandTerm => val zone = ctx.addReusableSessionTimeZone() val method = qualifyMethod(BuiltInMethods.TIMESTAMP_WITH_LOCAL_TIME_ZONE_TO_TIME) s"$method($operandTerm.getMillisecond(), $zone)" } // Disable cast conversion between Numeric type and Timestamp type case (TINYINT, TIMESTAMP_WITHOUT_TIME_ZONE) | (SMALLINT, TIMESTAMP_WITHOUT_TIME_ZONE) | (INTEGER, TIMESTAMP_WITHOUT_TIME_ZONE) | (BIGINT, TIMESTAMP_WITHOUT_TIME_ZONE) | (FLOAT, TIMESTAMP_WITHOUT_TIME_ZONE) | (DOUBLE, TIMESTAMP_WITHOUT_TIME_ZONE) | (DECIMAL, TIMESTAMP_WITHOUT_TIME_ZONE) | (TIMESTAMP_WITHOUT_TIME_ZONE, TINYINT) | (TIMESTAMP_WITHOUT_TIME_ZONE, SMALLINT) | (TIMESTAMP_WITHOUT_TIME_ZONE, INTEGER) | (TIMESTAMP_WITHOUT_TIME_ZONE, BIGINT) | (TIMESTAMP_WITHOUT_TIME_ZONE, FLOAT) | (TIMESTAMP_WITHOUT_TIME_ZONE, DOUBLE) | (TIMESTAMP_WITHOUT_TIME_ZONE, DECIMAL) => { if (TIMESTAMP_WITHOUT_TIME_ZONE.equals(targetType.getTypeRoot)) { throw new ValidationException("The cast conversion from NUMERIC type to TIMESTAMP type" + " is not allowed, it's recommended to use TO_TIMESTAMP(FROM_UNIXTIME(numeric_col))" + " instead, note the numeric is in seconds.") } else { throw new ValidationException("The cast conversion from TIMESTAMP type to NUMERIC type" + " is not allowed, it's recommended to use" + " UNIX_TIMESTAMP(CAST(timestamp_col AS STRING)) instead.") } } // Disable cast conversion between Numeric type and TimestampLtz type case (TINYINT, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (SMALLINT, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (INTEGER, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (BIGINT, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (FLOAT, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (DOUBLE, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (DECIMAL, TIMESTAMP_WITH_LOCAL_TIME_ZONE) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, TINYINT) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, SMALLINT) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, INTEGER) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, BIGINT) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, FLOAT) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, DOUBLE) | (TIMESTAMP_WITH_LOCAL_TIME_ZONE, DECIMAL) => { if (TIMESTAMP_WITH_LOCAL_TIME_ZONE.equals(targetType.getTypeRoot)) { throw new ValidationException("The cast conversion from NUMERIC type" + " to TIMESTAMP_LTZ type is not allowed, it's recommended to use" + " TO_TIMESTAMP_LTZ(numeric_col, precision) instead.") } else { throw new ValidationException("The cast conversion from" + " TIMESTAMP_LTZ type to NUMERIC type is not allowed.") } } // internal temporal casting // Date -> Integer 
// Time -> Integer // Integer -> Date // Integer -> Time // Integer -> Interval Months // Long -> Interval Millis // Interval Months -> Integer // Interval Millis -> Long case (DATE, INTEGER) | (TIME_WITHOUT_TIME_ZONE, INTEGER) | (INTEGER, DATE) | (INTEGER, TIME_WITHOUT_TIME_ZONE) | (INTEGER, INTERVAL_YEAR_MONTH) | (BIGINT, INTERVAL_DAY_TIME) | (INTERVAL_YEAR_MONTH, INTEGER) | (INTERVAL_DAY_TIME, BIGINT) => internalExprCasting(operand, targetType) // internal reinterpretation of temporal types // Date, Time, Interval Months -> Long case (DATE, BIGINT) | (TIME_WITHOUT_TIME_ZONE, BIGINT) | (INTERVAL_YEAR_MONTH, BIGINT) => internalExprCasting(operand, targetType) case (ROW | STRUCTURED_TYPE, ROW | STRUCTURED_TYPE) if supportsExplicitCast(operand.resultType, targetType) => generateCastRowToRow(ctx, operand, targetType) case (_, _) => throw new CodeGenException(s"Unsupported cast from '${operand.resultType}' to '$targetType'.") } def generateIfElse( ctx: CodeGeneratorContext, operands: Seq[GeneratedExpression], resultType: LogicalType, i: Int = 0) : GeneratedExpression = { // else part if (i == operands.size - 1) { generateCast(ctx, operands(i), resultType) } else { // check that the condition is boolean // we do not check for null instead we use the default value // thus null is false requireBoolean(operands(i)) val condition = operands(i) val trueAction = generateCast(ctx, operands(i + 1), resultType) val falseAction = generateIfElse(ctx, operands, resultType, i + 2) val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val resultTypeTerm = primitiveTypeTermForType(resultType) val defaultValue = primitiveDefaultValue(resultType) val operatorCode = if (ctx.nullCheck) { s""" |${condition.code} |$resultTypeTerm $resultTerm = $defaultValue; |boolean $nullTerm; |if (${condition.resultTerm}) { | ${trueAction.code} | $nullTerm = ${trueAction.nullTerm}; | if (!$nullTerm) { | $resultTerm = ${trueAction.resultTerm}; | } |} |else { | ${falseAction.code} | $nullTerm = ${falseAction.nullTerm}; | if (!$nullTerm) { | $resultTerm = ${falseAction.resultTerm}; | } |} |""".stripMargin.trim } else { s""" |${condition.code} |$resultTypeTerm $resultTerm; |if (${condition.resultTerm}) { | ${trueAction.code} | $resultTerm = ${trueAction.resultTerm}; |} |else { | ${falseAction.code} | $resultTerm = ${falseAction.resultTerm}; |} |""".stripMargin.trim } GeneratedExpression(resultTerm, nullTerm, operatorCode, resultType) } } def generateDot( ctx: CodeGeneratorContext, operands: Seq[GeneratedExpression]): GeneratedExpression = { // due to https://issues.apache.org/jira/browse/CALCITE-2162, expression such as // "array[1].a.b" won't work now. 
if (operands.size > 2) { throw new CodeGenException( "A DOT operator with more than 2 operands is not supported yet.") } checkArgument(operands(1).literal) checkArgument(isCharacterString(operands(1).resultType)) checkArgument(operands.head.resultType.isInstanceOf[RowType]) val fieldName = operands(1).literalValue.get.toString val fieldIdx = operands .head .resultType .asInstanceOf[RowType] .getFieldIndex(fieldName) val access = generateFieldAccess( ctx, operands.head.resultType, operands.head.resultTerm, fieldIdx) val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val resultTypeTerm = primitiveTypeTermForType(access.resultType) val defaultValue = primitiveDefaultValue(access.resultType) val resultCode = if (ctx.nullCheck) { s""" |${operands.map(_.code).mkString("\\n")} |$resultTypeTerm $resultTerm; |boolean $nullTerm; |if (${operands.map(_.nullTerm).mkString(" || ")}) { | $resultTerm = $defaultValue; | $nullTerm = true; |} |else { | ${access.code} | $resultTerm = ${access.resultTerm}; | $nullTerm = ${access.nullTerm}; |} |""".stripMargin } else { s""" |${operands.map(_.code).mkString("\\n")} |${access.code} |$resultTypeTerm $resultTerm = ${access.resultTerm}; |""".stripMargin } GeneratedExpression( resultTerm, nullTerm, resultCode, access.resultType ) } // ---------------------------------------------------------------------------------------- // value construction and accessing generate utils // ---------------------------------------------------------------------------------------- def generateRow( ctx: CodeGeneratorContext, rowType: LogicalType, elements: Seq[GeneratedExpression]): GeneratedExpression = { val fieldTypes = getFieldTypes(rowType) val isLiteral = elements.forall(e => e.literal) val isPrimitive = fieldTypes.forall(PlannerTypeUtils.isPrimitive) if (isLiteral) { // generate literal row generateLiteralRow(ctx, rowType, elements) } else { if (isPrimitive) { // generate primitive row val mapped = elements.zipWithIndex.map { case (element, idx) => if (element.literal) { element } else { val tpe = fieldTypes(idx) val resultTerm = primitiveDefaultValue(tpe) GeneratedExpression(resultTerm, ALWAYS_NULL, NO_CODE, tpe, Some(null)) } } val row = generateLiteralRow(ctx, rowType, mapped) val code = elements.zipWithIndex.map { case (element, idx) => val tpe = fieldTypes(idx) if (element.literal) { "" } else if(ctx.nullCheck) { s""" |${element.code} |if (${element.nullTerm}) { | ${binaryRowSetNull(idx, row.resultTerm, tpe)}; |} else { | ${binaryRowFieldSetAccess(idx, row.resultTerm, tpe, element.resultTerm)}; |} """.stripMargin } else { s""" |${element.code} |${binaryRowFieldSetAccess(idx, row.resultTerm, tpe, element.resultTerm)}; """.stripMargin } }.mkString("\\n") GeneratedExpression(row.resultTerm, NEVER_NULL, code, rowType) } else { // generate general row generateNonLiteralRow(ctx, rowType, elements) } } } private def generateLiteralRow( ctx: CodeGeneratorContext, rowType: LogicalType, elements: Seq[GeneratedExpression]): GeneratedExpression = { checkArgument(elements.forall(e => e.literal)) val expr = generateNonLiteralRow(ctx, rowType, elements) ctx.addReusableInitStatement(expr.code) GeneratedExpression(expr.resultTerm, GeneratedExpression.NEVER_NULL, NO_CODE, rowType) } private def generateNonLiteralRow( ctx: CodeGeneratorContext, rowType: LogicalType, elements: Seq[GeneratedExpression]): GeneratedExpression = { val fieldTypes = getFieldTypes(rowType) val rowTerm = newName("row") val writerTerm = newName("writer") val writerCls = className[BinaryRowWriter] val writeCode 
= elements.zipWithIndex.map { case (element, idx) => val tpe = fieldTypes(idx) if (ctx.nullCheck) { s""" |${element.code} |if (${element.nullTerm}) { | ${binaryWriterWriteNull(idx, writerTerm, tpe)}; |} else { | ${binaryWriterWriteField(ctx, idx, element.resultTerm, writerTerm, tpe)}; |} """.stripMargin } else { s""" |${element.code} |${binaryWriterWriteField(ctx, idx, element.resultTerm, writerTerm, tpe)}; """.stripMargin } }.mkString("\\n") val code = s""" |$writerTerm.reset(); |$writeCode |$writerTerm.complete(); """.stripMargin ctx.addReusableMember(s"$BINARY_ROW $rowTerm = new $BINARY_ROW(${fieldTypes.length});") ctx.addReusableMember(s"$writerCls $writerTerm = new $writerCls($rowTerm);") GeneratedExpression(rowTerm, GeneratedExpression.NEVER_NULL, code, rowType) } def generateArray( ctx: CodeGeneratorContext, resultType: LogicalType, elements: Seq[GeneratedExpression]): GeneratedExpression = { checkArgument(resultType.isInstanceOf[ArrayType]) val arrayType = resultType.asInstanceOf[ArrayType] val elementType = arrayType.getElementType val isLiteral = elements.forall(e => e.literal) val isPrimitive = PlannerTypeUtils.isPrimitive(elementType) if (isLiteral) { // generate literal array generateLiteralArray(ctx, arrayType, elements) } else { if (isPrimitive) { // generate primitive array val mapped = elements.map { element => if (element.literal) { element } else { val resultTerm = primitiveDefaultValue(elementType) GeneratedExpression(resultTerm, ALWAYS_NULL, NO_CODE, elementType, Some(null)) } } val array = generateLiteralArray(ctx, arrayType, mapped) val code = generatePrimitiveArrayUpdateCode(ctx, array.resultTerm, elementType, elements) GeneratedExpression(array.resultTerm, GeneratedExpression.NEVER_NULL, code, arrayType) } else { // generate general array generateNonLiteralArray(ctx, arrayType, elements) } } } private def generatePrimitiveArrayUpdateCode( ctx: CodeGeneratorContext, arrayTerm: String, elementType: LogicalType, elements: Seq[GeneratedExpression]): String = { elements.zipWithIndex.map { case (element, idx) => if (element.literal) { "" } else if (ctx.nullCheck) { s""" |${element.code} |if (${element.nullTerm}) { | ${binaryArraySetNull(idx, arrayTerm, elementType)}; |} else { | ${binaryRowFieldSetAccess( idx, arrayTerm, elementType, element.resultTerm)}; |} """.stripMargin } else { s""" |${element.code} |${binaryRowFieldSetAccess( idx, arrayTerm, elementType, element.resultTerm)}; """.stripMargin } }.mkString("\\n") } private def generateLiteralArray( ctx: CodeGeneratorContext, arrayType: ArrayType, elements: Seq[GeneratedExpression]): GeneratedExpression = { checkArgument(elements.forall(e => e.literal)) val expr = generateNonLiteralArray(ctx, arrayType, elements) ctx.addReusableInitStatement(expr.code) GeneratedExpression(expr.resultTerm, GeneratedExpression.NEVER_NULL, NO_CODE, arrayType) } private def generateNonLiteralArray( ctx: CodeGeneratorContext, arrayType: ArrayType, elements: Seq[GeneratedExpression]): GeneratedExpression = { val elementType = arrayType.getElementType val arrayTerm = newName("array") val writerTerm = newName("writer") val writerCls = className[BinaryArrayWriter] val elementSize = BinaryArrayData.calculateFixLengthPartSize(elementType) val writeCode = elements.zipWithIndex.map { case (element, idx) => s""" |${element.code} |if (${element.nullTerm}) { | ${binaryArraySetNull(idx, writerTerm, elementType)}; |} else { | ${binaryWriterWriteField(ctx, idx, element.resultTerm, writerTerm, elementType)}; |} """.stripMargin }.mkString("\\n") val code 
= s""" |$writerTerm.reset(); |$writeCode |$writerTerm.complete(); """.stripMargin val memberStmt = s""" |$BINARY_ARRAY $arrayTerm = new $BINARY_ARRAY(); |$writerCls $writerTerm = new $writerCls($arrayTerm, ${elements.length}, $elementSize); """.stripMargin ctx.addReusableMember(memberStmt) GeneratedExpression(arrayTerm, GeneratedExpression.NEVER_NULL, code, arrayType) } /** * Return null when array index out of bounds which follows Calcite's behaviour. * @see [[org.apache.calcite.sql.fun.SqlStdOperatorTable.ITEM]] */ def generateArrayElementAt( ctx: CodeGeneratorContext, array: GeneratedExpression, index: GeneratedExpression): GeneratedExpression = { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val componentInfo = array.resultType.asInstanceOf[ArrayType].getElementType val resultTypeTerm = primitiveTypeTermForType(componentInfo) val defaultTerm = primitiveDefaultValue(componentInfo) index.literalValue match { case Some(v: Int) if v < 1 => throw new ValidationException( s"Array element access needs an index starting at 1 but was $v.") case _ => //nothing } val idxStr = s"${index.resultTerm} - 1" val arrayIsNull = s"${array.resultTerm}.isNullAt($idxStr)" val arrayGet = rowFieldReadAccess(ctx, idxStr, array.resultTerm, componentInfo) val arrayAccessCode = s""" |${array.code} |${index.code} |boolean $nullTerm = ${array.nullTerm} || ${index.nullTerm} || | $idxStr < 0 || $idxStr >= ${array.resultTerm}.size() || $arrayIsNull; |$resultTypeTerm $resultTerm = $nullTerm ? $defaultTerm : $arrayGet; |""".stripMargin GeneratedExpression(resultTerm, nullTerm, arrayAccessCode, componentInfo) } def generateArrayElement( ctx: CodeGeneratorContext, array: GeneratedExpression): GeneratedExpression = { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val resultType = array.resultType.asInstanceOf[ArrayType].getElementType val resultTypeTerm = primitiveTypeTermForType(resultType) val defaultValue = primitiveDefaultValue(resultType) val arrayLengthCode = s"${array.nullTerm} ? 0 : ${array.resultTerm}.size()" val arrayGet = rowFieldReadAccess(ctx, 0, array.resultTerm, resultType) val arrayAccessCode = s""" |${array.code} |boolean $nullTerm; |$resultTypeTerm $resultTerm; |switch ($arrayLengthCode) { | case 0: | $nullTerm = true; | $resultTerm = $defaultValue; | break; | case 1: | $nullTerm = ${array.resultTerm}.isNullAt(0); | $resultTerm = $nullTerm ? 
$defaultValue : $arrayGet; | break; | default: | throw new RuntimeException("Array has more than one element."); |} |""".stripMargin GeneratedExpression(resultTerm, nullTerm, arrayAccessCode, resultType) } def generateArrayCardinality( ctx: CodeGeneratorContext, array: GeneratedExpression) : GeneratedExpression = { generateUnaryOperatorIfNotNull(ctx, new IntType(), array) { _ => s"${array.resultTerm}.size()" } } def generateMap( ctx: CodeGeneratorContext, resultType: LogicalType, elements: Seq[GeneratedExpression]): GeneratedExpression = { checkArgument(resultType.isInstanceOf[MapType]) val mapType = resultType.asInstanceOf[MapType] val baseMap = newName("map") // prepare map key array val keyElements = elements.grouped(2).map { case Seq(key, _) => key }.toSeq val keyType = mapType.getKeyType val keyExpr = generateArray(ctx, new ArrayType(keyType), keyElements) val isKeyFixLength = isPrimitive(keyType) // prepare map value array val valueElements = elements.grouped(2).map { case Seq(_, value) => value }.toSeq val valueType = mapType.getValueType val valueExpr = generateArray(ctx, new ArrayType(valueType), valueElements) val isValueFixLength = isPrimitive(valueType) // construct binary map ctx.addReusableMember(s"$MAP_DATA $baseMap = null;") val code = if (isKeyFixLength && isValueFixLength) { val binaryMap = newName("binaryMap") ctx.addReusableMember(s"$BINARY_MAP $binaryMap = null;") // the key and value are fixed length, initialize and reuse the map in constructor val init = s"$binaryMap = $BINARY_MAP.valueOf(${keyExpr.resultTerm}, ${valueExpr.resultTerm});" ctx.addReusableInitStatement(init) // there are some non-literal primitive fields need to update val keyArrayTerm = newName("keyArray") val valueArrayTerm = newName("valueArray") val keyUpdate = generatePrimitiveArrayUpdateCode( ctx, keyArrayTerm, keyType, keyElements) val valueUpdate = generatePrimitiveArrayUpdateCode( ctx, valueArrayTerm, valueType, valueElements) s""" |$BINARY_ARRAY $keyArrayTerm = $binaryMap.keyArray(); |$keyUpdate |$BINARY_ARRAY $valueArrayTerm = $binaryMap.valueArray(); |$valueUpdate |$baseMap = $binaryMap; """.stripMargin } else { // the key or value is not fixed length, re-create the map on every update s""" |${keyExpr.code} |${valueExpr.code} |$baseMap = $BINARY_MAP.valueOf(${keyExpr.resultTerm}, ${valueExpr.resultTerm}); """.stripMargin } GeneratedExpression(baseMap, NEVER_NULL, code, resultType) } def generateMapGet( ctx: CodeGeneratorContext, map: GeneratedExpression, key: GeneratedExpression): GeneratedExpression = { val Seq(resultTerm, nullTerm) = newNames("result", "isNull") val tmpKey = newName("key") val length = newName("length") val keys = newName("keys") val values = newName("values") val index = newName("index") val found = newName("found") val tmpValue = newName("value") val mapType = map.resultType.asInstanceOf[MapType] val keyType = mapType.getKeyType val valueType = mapType.getValueType // use primitive for key as key is not null val keyTypeTerm = primitiveTypeTermForType(keyType) val valueTypeTerm = primitiveTypeTermForType(valueType) val valueDefault = primitiveDefaultValue(valueType) val binaryMapTerm = newName("binaryMap") val genericMapTerm = newName("genericMap") val boxedValueTypeTerm = boxedTypeTermForType(valueType) val mapTerm = map.resultTerm val equal = generateEquals( ctx, // We have to create a new GeneratedExpression from `key`, but erase the code of it. 
// Otherwise, the code of `key` will be called twice in `accessCode`, which may lead to // exceptions such as 'Redefinition of local variable'. GeneratedExpression(key.resultTerm, key.nullTerm, NO_CODE, key.resultType, key.literalValue), GeneratedExpression(tmpKey, NEVER_NULL, NO_CODE, keyType) ) val code = s""" |if ($mapTerm instanceof $BINARY_MAP) { | $BINARY_MAP $binaryMapTerm = ($BINARY_MAP) $mapTerm; | final int $length = $binaryMapTerm.size(); | final $BINARY_ARRAY $keys = $binaryMapTerm.keyArray(); | final $BINARY_ARRAY $values = $binaryMapTerm.valueArray(); | | int $index = 0; | boolean $found = false; | if (${key.nullTerm}) { | while ($index < $length && !$found) { | if ($keys.isNullAt($index)) { | $found = true; | } else { | $index++; | } | } | } else { | while ($index < $length && !$found) { | final $keyTypeTerm $tmpKey = ${rowFieldReadAccess(ctx, index, keys, keyType)}; | ${equal.code} | if (${equal.resultTerm}) { | $found = true; | } else { | $index++; | } | } | } | | if (!$found || $values.isNullAt($index)) { | $nullTerm = true; | } else { | $resultTerm = ${rowFieldReadAccess(ctx, index, values, valueType)}; | } |} else { | $GENERIC_MAP $genericMapTerm = ($GENERIC_MAP) $mapTerm; | $boxedValueTypeTerm $tmpValue = | ($boxedValueTypeTerm) $genericMapTerm.get(($keyTypeTerm) ${key.resultTerm}); | if ($tmpValue == null) { | $nullTerm = true; | } else { | $resultTerm = $tmpValue; | } |} """.stripMargin val accessCode = s""" |${map.code} |${key.code} |boolean $nullTerm = (${map.nullTerm} || ${key.nullTerm}); |$valueTypeTerm $resultTerm = $valueDefault; |if (!$nullTerm) { | $code |} """.stripMargin GeneratedExpression(resultTerm, nullTerm, accessCode, valueType) } def generateMapCardinality( ctx: CodeGeneratorContext, map: GeneratedExpression): GeneratedExpression = { generateUnaryOperatorIfNotNull(ctx, new IntType(), map) { _ => s"${map.resultTerm}.size()" } } // ---------------------------------------------------------------------------------------- // private generate utils // ---------------------------------------------------------------------------------------- private def generateCastRowToRow( ctx: CodeGeneratorContext, operand: GeneratedExpression, targetRowType: LogicalType) : GeneratedExpression = { // assumes that the arity has been checked before generateCallWithStmtIfArgsNotNull(ctx, targetRowType, Seq(operand)) { case Seq(rowTerm) => val fieldExprs = operand .resultType .getChildren .zip(targetRowType.getChildren) .zipWithIndex .map { case ((sourceType, targetType), idx) => val sourceTypeTerm = primitiveTypeTermForType(sourceType) val sourceTerm = newName("field") val sourceAccessCode = rowFieldReadAccess(ctx, idx, rowTerm, sourceType) val sourceExpr = GeneratedExpression( sourceTerm, s"$rowTerm.isNullAt($idx)", s"$sourceTypeTerm $sourceTerm = ($sourceTypeTerm) $sourceAccessCode;", sourceType) generateCast(ctx, sourceExpr, targetType) } val generateRowExpr = generateRow(ctx, targetRowType, fieldExprs) (generateRowExpr.code, generateRowExpr.resultTerm) } } private def generateCastStringLiteralToDateTime( ctx: CodeGeneratorContext, stringLiteral: GeneratedExpression, expectType: LogicalType): GeneratedExpression = { checkArgument(stringLiteral.literal) val rightTerm = stringLiteral.resultTerm val typeTerm = primitiveTypeTermForType(expectType) val defaultTerm = primitiveDefaultValue(expectType) val term = newName("stringToTime") val code = stringToLocalTimeCode(expectType, rightTerm) val stmt = s"$typeTerm $term = ${stringLiteral.nullTerm} ? 
$defaultTerm : $code;" ctx.addReusableMember(stmt) stringLiteral.copy(resultType = expectType, resultTerm = term) } private def generateCastArrayToString( ctx: CodeGeneratorContext, operand: GeneratedExpression, at: ArrayType, targetType: LogicalType): GeneratedExpression = generateStringResultCallWithStmtIfArgsNotNull(ctx, Seq(operand), targetType) { terms => val builderCls = classOf[JStringBuilder].getCanonicalName val builderTerm = newName("builder") ctx.addReusableMember(s"""$builderCls $builderTerm = new $builderCls();""") val arrayTerm = terms.head val indexTerm = newName("i") val numTerm = newName("num") val elementType = at.getElementType val elementCls = primitiveTypeTermForType(elementType) val elementTerm = newName("element") val elementNullTerm = newName("isNull") val elementCode = s""" |$elementCls $elementTerm = ${primitiveDefaultValue(elementType)}; |boolean $elementNullTerm = $arrayTerm.isNullAt($indexTerm); |if (!$elementNullTerm) { | $elementTerm = ($elementCls) ${ rowFieldReadAccess(ctx, indexTerm, arrayTerm, elementType)}; |} """.stripMargin val elementExpr = GeneratedExpression( elementTerm, elementNullTerm, elementCode, elementType) val castExpr = generateCast(ctx, elementExpr, targetType) val stmt = s""" |$builderTerm.setLength(0); |$builderTerm.append("["); |int $numTerm = $arrayTerm.size(); |for (int $indexTerm = 0; $indexTerm < $numTerm; $indexTerm++) { | if ($indexTerm != 0) { | $builderTerm.append(", "); | } | | ${castExpr.code} | if (${castExpr.nullTerm}) { | $builderTerm.append("null"); | } else { | $builderTerm.append(${castExpr.resultTerm}); | } |} |$builderTerm.append("]"); """.stripMargin (stmt, s"$builderTerm.toString()") } private def generateCastMapToString( ctx: CodeGeneratorContext, operand: GeneratedExpression, mt: MapType, targetType: LogicalType): GeneratedExpression = generateStringResultCallWithStmtIfArgsNotNull(ctx, Seq(operand), targetType) { terms => val resultTerm = newName("toStringResult") val builderCls = classOf[JStringBuilder].getCanonicalName val builderTerm = newName("builder") ctx.addReusableMember(s"$builderCls $builderTerm = new $builderCls();") val mapTerm = terms.head val genericMapTerm = newName("genericMap") val binaryMapTerm = newName("binaryMap") val keyArrayTerm = newName("keyArray") val valueArrayTerm = newName("valueArray") val indexTerm = newName("i") val numTerm = newName("num") val keyType = mt.getKeyType val keyCls = primitiveTypeTermForType(keyType) val keyTerm = newName("key") val keyNullTerm = newName("isNull") val keyCode = s""" |$keyCls $keyTerm = ${primitiveDefaultValue(keyType)}; |boolean $keyNullTerm = $keyArrayTerm.isNullAt($indexTerm); |if (!$keyNullTerm) { | $keyTerm = ($keyCls) ${ rowFieldReadAccess(ctx, indexTerm, keyArrayTerm, keyType)}; |} """.stripMargin val keyExpr = GeneratedExpression(keyTerm, keyNullTerm, keyCode, keyType) val keyCastExpr = generateCast(ctx, keyExpr, targetType) val valueType = mt.getValueType val valueCls = primitiveTypeTermForType(valueType) val valueTerm = newName("value") val valueNullTerm = newName("isNull") val valueCode = s""" |$valueCls $valueTerm = ${primitiveDefaultValue(valueType)}; |boolean $valueNullTerm = $valueArrayTerm.isNullAt($indexTerm); |if (!$valueNullTerm) { | $valueTerm = ($valueCls) ${ rowFieldReadAccess(ctx, indexTerm, valueArrayTerm, valueType)}; |} """.stripMargin val valueExpr = GeneratedExpression(valueTerm, valueNullTerm, valueCode, valueType) val valueCastExpr = generateCast(ctx, valueExpr, targetType) val stmt = s""" |String $resultTerm; |if ($mapTerm 
instanceof $BINARY_MAP) { | $BINARY_MAP $binaryMapTerm = ($BINARY_MAP) $mapTerm; | $ARRAY_DATA $keyArrayTerm = $binaryMapTerm.keyArray(); | $ARRAY_DATA $valueArrayTerm = $binaryMapTerm.valueArray(); | | $builderTerm.setLength(0); | $builderTerm.append("{"); | | int $numTerm = $binaryMapTerm.size(); | for (int $indexTerm = 0; $indexTerm < $numTerm; $indexTerm++) { | if ($indexTerm != 0) { | $builderTerm.append(", "); | } | | ${keyCastExpr.code} | if (${keyCastExpr.nullTerm}) { | $builderTerm.append("null"); | } else { | $builderTerm.append(${keyCastExpr.resultTerm}); | } | $builderTerm.append("="); | | ${valueCastExpr.code} | if (${valueCastExpr.nullTerm}) { | $builderTerm.append("null"); | } else { | $builderTerm.append(${valueCastExpr.resultTerm}); | } | } | $builderTerm.append("}"); | | $resultTerm = $builderTerm.toString(); |} else { | $GENERIC_MAP $genericMapTerm = ($GENERIC_MAP) $mapTerm; | $resultTerm = $genericMapTerm.toString(); |} """.stripMargin (stmt, resultTerm) } private def generateCastRowDataToString( ctx: CodeGeneratorContext, operand: GeneratedExpression, brt: RowType, targetType: LogicalType): GeneratedExpression = generateStringResultCallWithStmtIfArgsNotNull(ctx, Seq(operand), targetType) { terms => val builderCls = classOf[JStringBuilder].getCanonicalName val builderTerm = newName("builder") ctx.addReusableMember(s"""$builderCls $builderTerm = new $builderCls();""") val rowTerm = terms.head val appendCode = brt.getChildren.zipWithIndex.map { case (elementType, idx) => val elementCls = primitiveTypeTermForType(elementType) val elementTerm = newName("element") val elementExpr = GeneratedExpression( elementTerm, s"$rowTerm.isNullAt($idx)", s"$elementCls $elementTerm = ($elementCls) ${rowFieldReadAccess( ctx, idx, rowTerm, elementType)};", elementType) val castExpr = generateCast(ctx, elementExpr, targetType) s""" |${if (idx != 0) s"""$builderTerm.append(",");""" else ""} |${castExpr.code} |if (${castExpr.nullTerm}) { | $builderTerm.append("null"); |} else { | $builderTerm.append(${castExpr.resultTerm}); |} """.stripMargin }.mkString("\\n") val stmt = s""" |$builderTerm.setLength(0); |$builderTerm.append("("); |$appendCode |$builderTerm.append(")"); """.stripMargin (stmt, s"$builderTerm.toString()") } private def generateArrayComparison( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression): GeneratedExpression = generateCallWithStmtIfArgsNotNull(ctx, new BooleanType(), Seq(left, right)) { args => val leftTerm = args.head val rightTerm = args(1) val resultTerm = newName("compareResult") val elementType = left.resultType.asInstanceOf[ArrayType].getElementType val elementCls = primitiveTypeTermForType(elementType) val elementDefault = primitiveDefaultValue(elementType) val leftElementTerm = newName("leftElement") val leftElementNullTerm = newName("leftElementIsNull") val leftElementExpr = GeneratedExpression(leftElementTerm, leftElementNullTerm, "", elementType) val rightElementTerm = newName("rightElement") val rightElementNullTerm = newName("rightElementIsNull") val rightElementExpr = GeneratedExpression(rightElementTerm, rightElementNullTerm, "", elementType) val indexTerm = newName("index") val elementEqualsExpr = generateEquals(ctx, leftElementExpr, rightElementExpr) val stmt = s""" |boolean $resultTerm; |if ($leftTerm instanceof $BINARY_ARRAY && $rightTerm instanceof $BINARY_ARRAY) { | $resultTerm = $leftTerm.equals($rightTerm); |} else { | if ($leftTerm.size() == $rightTerm.size()) { | $resultTerm = true; | for (int $indexTerm = 0; 
$indexTerm < $leftTerm.size(); $indexTerm++) { | $elementCls $leftElementTerm = $elementDefault; | boolean $leftElementNullTerm = $leftTerm.isNullAt($indexTerm); | if (!$leftElementNullTerm) { | $leftElementTerm = | ${rowFieldReadAccess(ctx, indexTerm, leftTerm, elementType)}; | } | | $elementCls $rightElementTerm = $elementDefault; | boolean $rightElementNullTerm = $rightTerm.isNullAt($indexTerm); | if (!$rightElementNullTerm) { | $rightElementTerm = | ${rowFieldReadAccess(ctx, indexTerm, rightTerm, elementType)}; | } | | ${elementEqualsExpr.code} | if (!${elementEqualsExpr.resultTerm}) { | $resultTerm = false; | break; | } | } | } else { | $resultTerm = false; | } |} """.stripMargin (stmt, resultTerm) } private def generateMapComparison( ctx: CodeGeneratorContext, left: GeneratedExpression, right: GeneratedExpression, keyType: LogicalType, valueType: LogicalType) : GeneratedExpression = generateCallWithStmtIfArgsNotNull(ctx, new BooleanType(), Seq(left, right)) { args => val leftTerm = args.head val rightTerm = args(1) val resultTerm = newName("compareResult") val mapCls = className[java.util.Map[_, _]] val keyCls = boxedTypeTermForType(keyType) val valueCls = boxedTypeTermForType(valueType) val leftMapTerm = newName("leftMap") val leftKeyTerm = newName("leftKey") val leftValueTerm = newName("leftValue") val leftValueNullTerm = newName("leftValueIsNull") val leftValueExpr = GeneratedExpression(leftValueTerm, leftValueNullTerm, "", valueType) val rightMapTerm = newName("rightMap") val rightValueTerm = newName("rightValue") val rightValueNullTerm = newName("rightValueIsNull") val rightValueExpr = GeneratedExpression(rightValueTerm, rightValueNullTerm, "", valueType) val entryTerm = newName("entry") val entryCls = classOf[java.util.Map.Entry[AnyRef, AnyRef]].getCanonicalName val valueEqualsExpr = generateEquals(ctx, leftValueExpr, rightValueExpr) val internalTypeCls = classOf[LogicalType].getCanonicalName val keyTypeTerm = ctx.addReusableObject(keyType, "keyType", internalTypeCls) val valueTypeTerm = ctx.addReusableObject(valueType, "valueType", internalTypeCls) val mapDataUtil = className[MapDataUtil] val stmt = s""" |boolean $resultTerm; |if ($leftTerm.size() == $rightTerm.size()) { | $resultTerm = true; | $mapCls $leftMapTerm = $mapDataUtil | .convertToJavaMap($leftTerm, $keyTypeTerm, $valueTypeTerm); | $mapCls $rightMapTerm = $mapDataUtil | .convertToJavaMap($rightTerm, $keyTypeTerm, $valueTypeTerm); | | for ($entryCls $entryTerm : $leftMapTerm.entrySet()) { | $keyCls $leftKeyTerm = ($keyCls) $entryTerm.getKey(); | if ($rightMapTerm.containsKey($leftKeyTerm)) { | $valueCls $leftValueTerm = ($valueCls) $entryTerm.getValue(); | $valueCls $rightValueTerm = ($valueCls) $rightMapTerm.get($leftKeyTerm); | boolean $leftValueNullTerm = ($leftValueTerm == null); | boolean $rightValueNullTerm = ($rightValueTerm == null); | | ${valueEqualsExpr.code} | if (!${valueEqualsExpr.resultTerm}) { | $resultTerm = false; | break; | } | } else { | $resultTerm = false; | break; | } | } |} else { | $resultTerm = false; |} """.stripMargin (stmt, resultTerm) } // ------------------------------------------------------------------------------------------ private def generateUnaryOperatorIfNotNull( ctx: CodeGeneratorContext, returnType: LogicalType, operand: GeneratedExpression, resultNullable: Boolean = false) (expr: String => String): GeneratedExpression = { generateCallIfArgsNotNull(ctx, returnType, Seq(operand), resultNullable) { args => expr(args.head) } } private def generateOperatorIfNotNull( ctx: 
CodeGeneratorContext, returnType: LogicalType, left: GeneratedExpression, right: GeneratedExpression, resultNullable: Boolean = false) (expr: (String, String) => String) : GeneratedExpression = { generateCallIfArgsNotNull(ctx, returnType, Seq(left, right), resultNullable) { args => expr(args.head, args(1)) } } // ---------------------------------------------------------------------------------------------- private def internalExprCasting( expr: GeneratedExpression, targetType: LogicalType) : GeneratedExpression = { expr.copy(resultType = targetType) } private def numericCasting( operandType: LogicalType, resultType: LogicalType): String => String = { val resultTypeTerm = primitiveTypeTermForType(resultType) def decToPrimMethod(targetType: LogicalType): String = targetType.getTypeRoot match { case TINYINT => "castToByte" case SMALLINT => "castToShort" case INTEGER => "castToInt" case BIGINT => "castToLong" case FLOAT => "castToFloat" case DOUBLE => "castToDouble" case BOOLEAN => "castToBoolean" case _ => throw new CodeGenException(s"Unsupported decimal casting type: '$targetType'") } // no casting necessary if (isInteroperable(operandType, resultType)) { operandTerm => s"$operandTerm" } // decimal to decimal, may have different precision/scale else if (isDecimal(resultType) && isDecimal(operandType)) { val dt = resultType.asInstanceOf[DecimalType] operandTerm => s"$DECIMAL_UTIL.castToDecimal($operandTerm, ${dt.getPrecision}, ${dt.getScale})" } // non_decimal_numeric to decimal else if (isDecimal(resultType) && isNumeric(operandType)) { val dt = resultType.asInstanceOf[DecimalType] operandTerm => s"$DECIMAL_UTIL.castFrom($operandTerm, ${dt.getPrecision}, ${dt.getScale})" } // decimal to non_decimal_numeric else if (isNumeric(resultType) && isDecimal(operandType) ) { operandTerm => s"$DECIMAL_UTIL.${decToPrimMethod(resultType)}($operandTerm)" } // numeric to numeric // TODO: Create a wrapper layer that handles type conversion between numeric. 
else if (isNumeric(operandType) && isNumeric(resultType)) { val resultTypeValue = resultTypeTerm + "Value()" val boxedTypeTerm = boxedTypeTermForType(operandType) operandTerm => s"(new $boxedTypeTerm($operandTerm)).$resultTypeValue" } // result type is time interval and operand type is integer else if (isTimeInterval(resultType) && isInteger(operandType)){ operandTerm => s"(($resultTypeTerm) $operandTerm)" } else { throw new CodeGenException(s"Unsupported casting from $operandType to $resultType.") } } private def stringToLocalTimeCode( targetType: LogicalType, operandTerm: String): String = targetType.getTypeRoot match { case DATE => s"${qualifyMethod(BuiltInMethods.STRING_TO_DATE)}($operandTerm.toString())" case TIME_WITHOUT_TIME_ZONE => s"${qualifyMethod(BuiltInMethods.STRING_TO_TIME)}($operandTerm.toString())" case TIMESTAMP_WITHOUT_TIME_ZONE => s""" |${qualifyMethod(BuiltInMethods.STRING_TO_TIMESTAMP)}($operandTerm.toString()) |""".stripMargin case _ => throw new UnsupportedOperationException } private def localTimeToStringCode( ctx: CodeGeneratorContext, fromType: LogicalType, operandTerm: String): String = fromType.getTypeRoot match { case DATE => s"${qualifyMethod(BuiltInMethod.UNIX_DATE_TO_STRING.method)}($operandTerm)" case TIME_WITHOUT_TIME_ZONE => s"${qualifyMethod(BuiltInMethods.UNIX_TIME_TO_STRING)}($operandTerm)" case TIMESTAMP_WITHOUT_TIME_ZONE => // including rowtime indicator // The interpreted string conforms to the definition of timestamp literal // SQL 2011 Part 2 Section 6.13 General Rules 11) d) val precision = fromType.asInstanceOf[TimestampType].getPrecision s"${qualifyMethod(BuiltInMethods.TIMESTAMP_TO_STRING)}($operandTerm, $precision)" case TIMESTAMP_WITH_LOCAL_TIME_ZONE => val method = qualifyMethod(BuiltInMethods.TIMESTAMP_TO_STRING_TIME_ZONE) val zone = ctx.addReusableSessionTimeZone() val precision = fromType.asInstanceOf[LocalZonedTimestampType].getPrecision s"$method($operandTerm, $zone, $precision)" } }
rmetzger/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/ScalarOperatorGens.scala
Scala
apache-2.0
93,299
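// A rough illustration, not taken from the Flink sources above: the three-valued
// OR truth table that the generated Java in ScalarOperatorGens encodes, with
// Option[Boolean] standing in for SQL BOOLEAN and None for NULL/UNKNOWN.
object ThreeValuedOrSketch {
  def sqlOr(left: Option[Boolean], right: Option[Boolean]): Option[Boolean] =
    (left, right) match {
      case (Some(true), _) | (_, Some(true)) => Some(true)  // a known TRUE wins, the other side is skipped
      case (Some(false), Some(false))        => Some(false) // both sides known FALSE
      case _                                 => None        // any remaining NULL combination stays UNKNOWN
    }
}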
package cuckoo_egg

class BirdNest extends Nest {
  def layEgg: Egg = {
    new BirdEgg
  }
}
PaulNoth/scala_ao_patterns
cuckoo_egg/bird_nest.scala
Scala
mit
93
/* * Copyright (c) 2014-2020 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.eval import monix.execution.exceptions.DummyException import monix.execution.internal.Platform import scala.util.Success object CoevalMemoizeSuite extends BaseTestSuite { test("Coeval.eval.memoize should work for first subscriber") { implicit s => var effect = 0 val coeval = Coeval.eval { effect += 1; effect }.memoize val f = coeval.runTry() assertEquals(f, Success(1)) } test("Coeval.eval.memoize should work for next subscribers") { implicit s => var effect = 0 val coeval = Coeval.eval { effect += 1; effect }.memoize coeval.runTry() val f1 = coeval.runTry() assertEquals(f1, Success(1)) val f2 = coeval.runTry() assertEquals(f2, Success(1)) } test("Coeval.evalOnce.memoize should work for first subscriber") { implicit s => var effect = 0 val coeval = Coeval.evalOnce { effect += 1; effect }.memoize val f = coeval.runTry() assertEquals(f, Success(1)) } test("Coeval.evalOnce.memoize should work for next subscribers") { implicit s => var effect = 0 val coeval = Coeval.evalOnce { effect += 1; effect }.memoize coeval.runTry() val f1 = coeval.runTry() assertEquals(f1, Success(1)) val f2 = coeval.runTry() assertEquals(f2, Success(1)) } test("Coeval.now.memoize should return self") { implicit s => assertEquals(Coeval.now(10), Coeval.now(10).memoize) } test("Coeval.error.memoize should return self") { implicit s => val dummy = DummyException("dummy") assertEquals(Coeval.raiseError(dummy), Coeval.raiseError(dummy).memoize) } test("Coeval.memoize should be stack safe") { implicit s => var effect = 0 var coeval = Coeval { effect += 1; effect } val count = if (Platform.isJVM) 100000 else 5000 for (_ <- 0 until count) coeval = coeval.memoize assertEquals(coeval.runTry(), Success(1)) } test("Coeval.apply.memoize effects") { implicit s => var effect = 0 val coeval1 = Coeval { effect += 1; 3 }.memoize val coeval2 = coeval1.map { x => effect += 1; x + 1 } val result1 = coeval2.runTry() assertEquals(effect, 2) assertEquals(result1, Success(4)) val result2 = coeval2.runTry() assertEquals(effect, 3) assertEquals(result2, Success(4)) } test("Coeval.suspend.memoize effects") { implicit s => var effect = 0 val coeval1 = Coeval.defer { effect += 1; Coeval.now(3) }.memoize val coeval2 = coeval1.map { x => effect += 1; x + 1 } val result1 = coeval2.runTry() assertEquals(effect, 2) assertEquals(result1, Success(4)) val result2 = coeval2.runTry() assertEquals(effect, 3) assertEquals(result2, Success(4)) } test("Coeval.suspend.flatMap.memoize effects") { implicit s => var effect = 0 val coeval1 = Coeval.defer { effect += 1; Coeval.now(2) } .flatMap(x => Coeval.now(x + 1)) .memoize val coeval2 = coeval1.map { x => effect += 1; x + 1 } val result1 = coeval2.runTry() assertEquals(effect, 2) assertEquals(result1, Success(4)) val result2 = coeval2.runTry() assertEquals(effect, 3) assertEquals(result2, Success(4)) } }
alexandru/monifu
monix-eval/shared/src/test/scala/monix/eval/CoevalMemoizeSuite.scala
Scala
apache-2.0
3,851
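// A minimal usage sketch of the memoization behaviour exercised by the suite above,
// using only the monix.eval API that the tests themselves call.
import monix.eval.Coeval

object CoevalMemoizeSketch {
  def main(args: Array[String]): Unit = {
    var effect = 0
    val memoized = Coeval { effect += 1; effect }.memoize
    println(memoized.runTry()) // Success(1) -- the side effect runs on first evaluation
    println(memoized.runTry()) // Success(1) -- memoized, so effect is not incremented again
  }
}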
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.streaming.receiver import org.apache.spark.streaming.Time /** Messages sent to the Receiver. */ private[streaming] sealed trait ReceiverMessage extends Serializable private[streaming] object StopReceiver extends ReceiverMessage private[streaming] case class CleanupOldBlocks(threshTime: Time) extends ReceiverMessage
andrewor14/iolap
streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverMessage.scala
Scala
apache-2.0
1,147
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.rdd import scala.reflect.ClassTag import org.apache.spark.{NarrowDependency, Partition, TaskContext} import org.apache.spark.annotation.DeveloperApi private[spark] class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition { override val index = idx } /** * Represents a dependency between the PartitionPruningRDD and its parent. In this * case, the child RDD contains a subset of partitions of the parents'. */ private[spark] class PruneDependency[T](rdd: RDD[T], partitionFilterFunc: Int => Boolean) extends NarrowDependency[T](rdd) { @transient val partitions: Array[Partition] = rdd.partitions .filter(s => partitionFilterFunc(s.index)).zipWithIndex .map { case(split, idx) => new PartitionPruningRDDPartition(idx, split) : Partition } override def getParents(partitionId: Int): List[Int] = { List(partitions(partitionId).asInstanceOf[PartitionPruningRDDPartition].parentSplit.index) } } /** * :: DeveloperApi :: * A RDD used to prune RDD partitions/partitions so we can avoid launching tasks on * all partitions. An example use case: If we know the RDD is partitioned by range, * and the execution DAG has a filter on the key, we can avoid launching tasks * on partitions that don't have the range covering the key. */ @DeveloperApi class PartitionPruningRDD[T: ClassTag]( prev: RDD[T], partitionFilterFunc: Int => Boolean) extends RDD[T](prev.context, List(new PruneDependency(prev, partitionFilterFunc))) { override def compute(split: Partition, context: TaskContext): Iterator[T] = { firstParent[T].iterator( split.asInstanceOf[PartitionPruningRDDPartition].parentSplit, context) } override protected def getPartitions: Array[Partition] = getDependencies.head.asInstanceOf[PruneDependency[T]].partitions } @DeveloperApi object PartitionPruningRDD { /** * Create a PartitionPruningRDD. This function can be used to create the PartitionPruningRDD * when its type T is not known at compile time. */ def create[T](rdd: RDD[T], partitionFilterFunc: Int => Boolean): PartitionPruningRDD[T] = { new PartitionPruningRDD[T](rdd, partitionFilterFunc)(rdd.elementClassTag) } }
pronix/spark
core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
Scala
apache-2.0
3,031
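// An assumed usage sketch for the PartitionPruningRDD.create helper shown above;
// the local SparkContext, data and partition counts are illustrative, not from the repo.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.PartitionPruningRDD

object PartitionPruningSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("prune-sketch").setMaster("local[2]"))
    // ten input partitions; suppose a range filter tells us only indices >= 5 can match
    val data = sc.parallelize(1 to 100, numSlices = 10)
    val pruned = PartitionPruningRDD.create(data, partitionIndex => partitionIndex >= 5)
    println(pruned.partitions.length) // 5 -- tasks are only launched for the kept partitions
    sc.stop()
  }
}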
/* * ____ ____ _____ ____ ___ ____ * | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R) * | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data * | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc. * |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved. * * This program is free software: you can redistribute it and/or modify it under the terms of the * GNU Affero General Public License as published by the Free Software Foundation, either version * 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See * the GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License along with this * program. If not, see <http://www.gnu.org/licenses/>. * */ package com.precog.bifrost package jdbc import akka.actor.ActorSystem import akka.dispatch.{ExecutionContext, Future, Promise} import blueeyes.bkka._ import scalaz.Monad import org.streum.configrity.Configuration import com.precog.common.jobs.JobManager import com.precog.common.security.APIKeyFinder import com.precog.standalone.StandaloneShardServer object JDBCShardServer extends StandaloneShardServer { val caveatMessage = Some(""" !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Precog for PostgreSQL is a free product that Precog provides to the PostgreSQL community for doing data analysis on PostgreSQL. Due to technical limitations, we only recommend the product for exploratory data analysis. For developers interested in high-performance analytics on their PostgreSQL data, we recommend our cloud-based analytics solution and the PostgreSQL data importer, which can nicely complement existing PostgreSQL installations for analytic-intensive workloads. Please note that path globs are not yet supported in Precog for PostgreSQL !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! """) val actorSystem = ActorSystem("ExecutorSystem") implicit val executionContext = ExecutionContext.defaultExecutionContext(actorSystem) implicit val M: Monad[Future] = new FutureMonad(executionContext) def platformFor(config: Configuration, apiKeyfinder: APIKeyFinder[Future], jobManager: JobManager[Future]) = (new JDBCQueryExecutor(new JDBCQueryExecutorConfig(config.detach("queryExecutor")), jobManager, actorSystem), Stoppable.fromFuture(Future(()))) }
precog/platform
miklagard/jdbc/src/main/scala/com/precog/shard/jdbc/JDBCShardServer.scala
Scala
agpl-3.0
2,715
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dllib.keras.layers import com.intel.analytics.bigdl.dllib.nn.{RReLU => BRReLU} import com.intel.analytics.bigdl.dllib.keras.layers.{RReLU => ZRReLU} import com.intel.analytics.bigdl.dllib.tensor.Tensor import com.intel.analytics.bigdl.dllib.utils.Shape import com.intel.analytics.bigdl.dllib.keras.ZooSpecHelper import com.intel.analytics.bigdl.dllib.keras.serializer.ModuleSerializationTest class RReLUSpec extends ZooSpecHelper { "RReLU 3D Zoo" should "be the same as BigDL" in { val blayer = BRReLU[Float](1.0/9, 1.0/4) val zlayer = ZRReLU[Float](1.0/9, 1.0/4, inputShape = Shape(3, 4)) zlayer.build(Shape(-1, 3, 4)) zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 3, 4)) val input = Tensor[Float](Array(2, 3, 4)).rand() compareOutputAndGradInput(blayer, zlayer, input) } "RReLU 4D Zoo" should "be the same as BigDL" in { val blayer = BRReLU[Float]() val zlayer = ZRReLU[Float](inputShape = Shape(4, 8, 8)) zlayer.build(Shape(-1, 4, 8, 8)) zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 8, 8)) val input = Tensor[Float](Array(3, 4, 8, 8)).rand() compareOutputAndGradInput(blayer, zlayer, input) } } class RReLUSerialTest extends ModuleSerializationTest { override def test(): Unit = { val layer = RReLU[Float](inputShape = Shape(4, 5)) layer.build(Shape(2, 4, 5)) val input = Tensor[Float](2, 4, 5).rand() runSerializationTest(layer, input) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/keras/layers/RReLUSpec.scala
Scala
apache-2.0
2,104
/* * Copyright 2020 Precog Data * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.qscript import slamdata.Predef._ import quasar.fp.ski._ import quasar.{RenderTree, Terminal} import monocle.Iso import scalaz._ sealed abstract class Hole final case object SrcHole extends Hole object Hole { def apply(): Hole = SrcHole def unit = Iso[Hole, Unit](κ(()))(κ(SrcHole)) implicit val equal: Equal[Hole] = Equal.equalA implicit val show: Show[Hole] = Show.showFromToString implicit val renderTree: RenderTree[Hole] = RenderTree.make(κ(Terminal(List("○"), None))) }
quasar-analytics/quasar
qscript/src/main/scala/quasar/qscript/Hole.scala
Scala
apache-2.0
1,108
/* * Copyright (c) 2014-2021 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.tail.internal import scala.collection.mutable.ArrayBuffer import cats.effect.Sync import cats.syntax.functor._ import scala.util.control.NonFatal import monix.execution.internal.collection.ChunkedArrayStack import monix.tail.Iterant import monix.tail.Iterant._ import monix.tail.batches.BatchCursor private[tail] object IterantIntersperse { def apply[F[_], A](source: Iterant[F, A], separator: A)(implicit F: Sync[F]): Iterant[F, A] = Suspend { F.delay(new Loop[F, A](separator).apply(source)) } private class Loop[F[_], A](separator: A)(implicit F: Sync[F]) extends (Iterant[F, A] => Iterant[F, A]) { private[this] var prepend = false private[this] val stack = ChunkedArrayStack[F[Iterant[F, A]]]() def apply(source: Iterant[F, A]): Iterant[F, A] = { try source match { case halt @ Halt(opt) => val next = stack.pop() if (opt.nonEmpty || next == null) { halt } else { Suspend(next.map(this)) } case Suspend(rest) => Suspend(rest.map(this)) case NextCursor(cursor, rest) if !cursor.hasNext() => Suspend(rest.map(this)) case NextBatch(batch, rest) if !batch.cursor().hasNext() => Suspend(rest.map(this)) case Concat(lh, rh) => stack.push(rh) Suspend(lh.map(this)) case b @ Scope(_, _, _) => b.runMap(this) case _ if prepend => prepend = false Next(separator, F.pure(source).map(this)) case ref @ NextCursor(_, _) => processNonEmptyCursor(ref) case NextBatch(batch, rest) => processNonEmptyCursor(NextCursor(batch.cursor(), rest)) case Next(item, rest) => prepend = true Next(item, rest.map(this)) case last @ Last(a) => stack.pop() match { case null => last case some => prepend = true Next(a, some.map(this)) } } catch { case ex if NonFatal(ex) => Halt(Some(ex)) } } def processNonEmptyCursor(ref: NextCursor[F, A]): Iterant[F, A] = { val NextCursor(cursor, rest) = ref val batchSize = cursor.recommendedBatchSize if (batchSize <= 1) { val item = cursor.next() prepend = true Next(item, F.delay(this(ref))) } else { var appends = 0 val maxAppends = batchSize / 2 val buffer = ArrayBuffer.empty[A] var continue = true while (continue && appends < maxAppends) { buffer += cursor.next() appends += 1 if (cursor.hasNext()) { // only append separator if element is guaranteed to be not the last one buffer += separator } else { continue = false } } val batchCursor = BatchCursor.fromArray(buffer.toArray[Any]).asInstanceOf[BatchCursor[A]] if (cursor.hasNext()) { // ref now contains mutated cursor, continue with it prepend = false NextCursor(batchCursor, F.delay(this(ref))) } else { prepend = true NextCursor(batchCursor, rest.map(this)) } } } } }
monixio/monix
monix-tail/shared/src/main/scala/monix/tail/internal/IterantIntersperse.scala
Scala
apache-2.0
3,946
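The IterantIntersperse record above is an internal loop; end users reach it through the public intersperse combinator on monix.tail.Iterant. The sketch below is a hedged illustration added for this edition: the builder and runner calls (Iterant[Task].of, toListL, runToFuture) are taken from the public monix 3.x API as I understand it, and the demo object itself is invented.

// Hedged usage sketch (not from the dataset): the internal loop above backs the
// public `intersperse` combinator on Iterant.
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.tail.Iterant
import scala.concurrent.Await
import scala.concurrent.duration._

object IntersperseDemo {
  def main(args: Array[String]): Unit = {
    val separated: Task[List[Int]] =
      Iterant[Task].of(1, 2, 3).intersperse(0).toListL
    // Expected: List(1, 0, 2, 0, 3) -- the separator never trails the last element.
    println(Await.result(separated.runToFuture, 5.seconds))
  }
}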
package tuner.test.functional import org.scalatest.FeatureSpec import org.scalatest.GivenWhenThen import org.scalatest.Matchers._ import tuner.test.Util._ import tuner.Tuner import tuner.project.Project class OpenProjectSpec extends FeatureSpec with GivenWhenThen { feature("Opening a viewable project shouldn't cause any errors") { Given("A project in a viewable state") val projPath = resource("/3d_viewable.proj") val proj = Project.fromFile(projPath) When("The project is opened") Tuner.openProject(proj) Then("No errors should occur") And("It should be viewable") proj shouldBe a [tuner.project.Viewable] } }
gabysbrain/tuner
src/test/scala/tuner/functional/OpenProjectSpec.scala
Scala
mit
660
package com.twitter.finatra.http.marshalling trait DefaultMessageBodyWriter extends MessageBodyWriter[Any]
nkhuyu/finatra
http/src/main/scala/com/twitter/finatra/http/marshalling/DefaultMessageBodyWriter.scala
Scala
apache-2.0
110
package org.openurp.edu.eams.system.report.service.internal import org.beangle.commons.dao.impl.BaseServiceImpl import org.beangle.data.jpa.dao.OqlBuilder import org.openurp.edu.base.Project import org.openurp.edu.eams.system.report.ReportTemplate import org.openurp.edu.eams.system.report.service.ReportTemplateService class ReportTemplateServiceImpl extends BaseServiceImpl with ReportTemplateService { def getTemplate(project: Project, code: String): ReportTemplate = { val builder = OqlBuilder.from(classOf[ReportTemplate], "rt") builder.where("rt.project =:project and rt.code=:code", project, code) .cacheable() val templates = entityDao.search(builder) if ((templates.isEmpty)) null else templates.head } def getCategoryTemplates(project: Project, category: String): Seq[ReportTemplate] = { val builder = OqlBuilder.from(classOf[ReportTemplate], "rt") builder.where("rt.project =:project and rt.category=:category", project, category) .cacheable() entityDao.search(builder) } }
openurp/edu-eams-webapp
core/src/main/scala/org/openurp/edu/eams/system/report/service/internal/ReportTemplateServiceImpl.scala
Scala
gpl-3.0
1,038
package cmdreader

import types._

/**
 * A class to represent a preloaded command.
 * @author bluebear94
 */
abstract class Command {
  /**
   * Returns the name of the command.
   * @return the name of the command
   */
  def getName(): String// = ""
  /**
   * Returns whether a given arity is valid for this command.
   * @param n the arity to check
   * @return true if the command accepts n arguments
   */
  def isValidArg0(n: Int): Boolean// = true
  /**
   * Returns the value produced by this command for a given set of arguments.
   * @param args the array containing the arguments
   * @return the result of evaluating the command
   */
  def apply(args: Array[Type]): Type// = new TVoid()
}
bluebear94/bag
src/main/scala/cmdreader/Command.scala
Scala
gpl-3.0
581
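The Command record above leaves its three abstract members undemonstrated. The subclass below is a purely hypothetical sketch added for this edition: the command name "hello", the arity rule and the greeting output are invented, and new TVoid() is assumed to be a valid result only because the commented-out default in the record uses it.

// Hypothetical subclass (not part of the dataset): shows how the three abstract
// members of Command fit together. Name, arity rule and output are invented;
// `new TVoid()` follows the commented-out default in the record above.
package cmdreader

import types._

class Hello extends Command {
  def getName(): String = "hello"
  // Accept zero or one argument.
  def isValidArg0(n: Int): Boolean = n <= 1
  def apply(args: Array[Type]): Type = {
    println("hello " + args.mkString(" "))
    new TVoid()
  }
}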
/* * Copyright 2007-2010 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb { package util { import common._ /** * Companion object for FatLaxy. */ object FatLazy { /** * Create a new FatLazy. */ def apply[T](f: => T) = new FatLazy(f) // implicit def fromLazy[T](in: Lazy[T]): T = in.get } /** * A class that does lazy evaluation * * @param f -- a function that evaluates to the default value of the instance */ class FatLazy[T](f: => T) { private var value: Box[T] = Empty /** * Get the value of the instance. If it's not yet been set, call f to calculate it * * @return the value of the instance */ def get: T = synchronized { value match { case Full(v) => v case _ => value = Full(f) value.open_! } } /** * Test whether the value of this class has been set or initialized from the default. */ def defined_? = synchronized { value != None } /** * Set the instance to a new value and return that value * * @param v - the new value of the instance * * @return v */ def set(v: T): T = synchronized { value = Full(v) v } /** * Copy the value of the specified FatLazy into this FatLazy */ def setFrom(other: FatLazy[T]): Unit = synchronized { value = other.value } /** * and the lazy() = foo style of assignment */ def update(v: T): Unit = set(v) /** * Reset the value of this FatLazy to the default (which will be lazily determined * on retrieval.) */ def reset = synchronized {value = Empty} /** * Determine whether the value of this FatLazy has been determined. */ def calculated_? = synchronized {value.isDefined} // implicit def fromLazy[T](in: Lazy[T]): T = in.get } /** * Sometimes, you want to do pattern matching against a lazy value. Why? * Because, there may be parts of the pattern that must be evaluated first * and if they evaluate successfully, you then want to test another part of * the pattern. Thus, the LZ pattern match. */ object LZ { def apply[T](f: => T): LZ[T] = new LZ(f) def unapply[T](in: LZ[T]): Option[T] = Some(in.get) // implicit def lazyToT[T](in: LazyMatcher[T]): T = in.get } /** * LZ encapsulates a lazy value. * * @param f - a value to be evaluated lazily */ class LZ[T](f: => T) { lazy val get = f override def toString = "LZ("+get+")" } object ThreadLazy { def apply[T](f: => T) = new ThreadLazy(f) implicit def what[T](in: ThreadLazy[T]): T = in.get } /** * A thread-local lazy value that provides a means to evaluate * a function in a lazily-evaluated scope. * * @param theFunc the lazily-evaluated expression for which to * cache the result in thread-local scope. */ class ThreadLazy[TheType](theFunc: => TheType) extends LoanWrapper { private val calced = new ThreadGlobal[Boolean] private val value = new ThreadGlobal[TheType] /** * Save the current cached lazy value, if any, evaluate the specified * function and then restore the previous value to the cache. The effect * of this function is to essentially perform a reset of this lazy value * to being unevaluated prior to function evaluation. 
*/ def apply[T](f: => T): T = { val old = value.value calced.set(false) try { f } finally { calced.set(false) value.set(old) } } /** * Reset the lazy value so that it will be recalculated from the default expression * on the next retrieval. */ def reset(): Unit = calced.set(false) /** * Return the value, evaluating the default expression if necessary. */ def get: TheType = { if (calced.value) value.value else { value.set(theFunc) calced.set(true) value.value } } } } }
wsaccaco/lift
framework/lift-base/lift-util/src/main/scala/net/liftweb/util/FatLazy.scala
Scala
apache-2.0
4,304
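To make the FatLazy record above concrete, here is a small usage sketch added for this edition. It exercises only members defined in the record (apply, get, defined_?, the update-assignment style and reset); the expensiveDefault computation and the demo object are stand-ins.

// Usage sketch (not part of the dataset): only members visible in the FatLazy
// record above are used; the "expensive" default computation is a stand-in.
import net.liftweb.util.FatLazy

object FatLazyDemo {
  def expensiveDefault(): String = {
    println("computing default...")
    "default"
  }

  def main(args: Array[String]): Unit = {
    val cached = FatLazy(expensiveDefault())
    println(cached.defined_?)   // false -- nothing evaluated yet
    println(cached.get)         // triggers the computation once
    println(cached.defined_?)   // true
    cached() = "overridden"     // the lazy() = foo assignment style from the record
    println(cached.get)         // "overridden", no recomputation
    cached.reset                // back to the lazily-computed default
    println(cached.get)         // recomputes expensiveDefault()
  }
}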
package core.clustering.kmeans import core.DataSet import core.clustering.model.{CentroidCluster, CentroidModel} import core.util.Distances.DistanceFunc import scala.annotation.tailrec import scala.util.Random /** * Implementation of the Kmeans algorithm. * * @param distance the distance function that is going to be used inside the algorithm. * @param K the number of clusters. * @param maxIters maximum number of iterations. */ class KMeans (K: Int, maxIters: Int, distance: DistanceFunc) { require(maxIters > 0, s"The maximum number of iterations ($maxIters) cannot be lesser than 1") /** * Trains a series of clusters (the model). * * @param dataSet the dataSet that is going to be used to learn the clusters. * @return the model. */ def train(dataSet: DataSet): CentroidModel ={ require(dataSet.data.length >= K, s"K ($K) cannot be greater than the dataSet size (${dataSet.data.length})") // Initializes the model by giving value to the centroids. val clusters = initialize(dataSet) // This array will remember the assigned cluster of each instance of the dataSet. // Initially all the instances belong to the first cluster. val assignments = Array.fill(dataSet.data.length)(-1) // Assigns each instance to its nearest cluster. CentroidCluster.assignToClusters(dataSet, clusters, assignments, distance) // Initializes current iterations. val iterations = 1 // Launch the recursion val finalClusters = iterate(dataSet, clusters, assignments, iterations) new CentroidModel(finalClusters, assignments, dataSet) } /** * * @param model * @param dataSet * @return */ def train(model: CentroidModel, dataSet: DataSet): CentroidModel ={ require(dataSet.data.length >= K, s"K ($K) cannot be greater than the dataSet size (${dataSet.data.length})") // Initial clusters val clusters = model.getClusters // Initial assignments val assignments = model.getAssignments // Initializes current iterations. val iterations = 1 // Launch the recursion val finalClusters = iterate(dataSet, clusters, assignments, iterations) new CentroidModel(finalClusters, assignments, dataSet) } /** * Initializes the clusters values randomly. * * @param dataSet the dataSet that is going to be used to learn the clusters. * @return the initialized clusters. */ private def initialize(dataSet: DataSet): List[CentroidCluster] = { // Creates a Set that will store the index of the random instance that will be chosen as the centroid of the cluster. val indexes = collection.mutable.Set(Random.nextInt(dataSet.data.length)) // It keeps adding more centroids until its size is equal to K. while (indexes.size < K) indexes.add(Random.nextInt(dataSet.data.length)) // TODO: It is necessary to transform it at the beginning or we would ignore 'repeated' instances // TODO: But, shouldn't centroids be different? // It creates a new cluster for each centroid. indexes.toList.map(x => new CentroidCluster(dataSet.data(x),dataSet)) } /** * Recursive private method that updates the centroids values and reassigns the instances to the new clusters. * * @param _initialClusters the initial value of the centroids. * @param assignments the array containing the cluster that each instance belongs to. * @param iters current number of iterations passed. * @return the updated model. */ @tailrec private def iterate(dataSet: DataSet, _initialClusters: List[CentroidCluster], assignments: Array[Int], iters : Int): List[CentroidCluster] = { // The clusters' centroids are moved. 
val updatedClusters = _initialClusters.map(_.moveCenter) // Instances are re-assigned val numberOfAssignments = CentroidCluster.assignToClusters(dataSet, updatedClusters, assignments, distance) // Stop condition of the algorithm if( iters >= maxIters || numberOfAssignments == 0) updatedClusters else iterate(dataSet, updatedClusters,assignments,iters + 1) } }
fernandoj92/ScalaML
src/main/scala/core/clustering/kmeans/KMeans.scala
Scala
gpl-3.0
4,168
package x7c1.linen.modern.init.settings.preset import x7c1.linen.database.struct.HasAccountId case class PresetChannelSubscriptionChanged( channelId: Long, accountId: Long, isSubscribed: Boolean, from: PresetEventLocation ) object PresetChannelSubscriptionChanged { implicit object account extends HasAccountId[PresetChannelSubscriptionChanged]{ override def toId = _.accountId } }
x7c1/Linen
linen-modern/src/main/scala/x7c1/linen/modern/init/settings/preset/PresetChannelSubscriptionChanged.scala
Scala
mit
402
// Project: angulate2 (https://github.com/jokade/angulate2) // Description: // Copyright (c) 2017 Johannes.Kastner <[email protected]> // Distributed under the MIT License (see included LICENSE file) package angulate2.platformBrowser.animations import scala.scalajs.js import scala.scalajs.js.annotation.JSImport @js.native @JSImport("@angular/platform-browser/animations","BrowserAnimationsModule") class NoopAnimationsModule extends js.Object
jokade/angulate2
bindings/src/main/scala/angulate2/platformBrowser/animations/NoopAnimationsModule.scala
Scala
mit
468
package io.flow.lint.linters import io.apibuilder.spec.v0.models.{Field, Model, Service} import io.flow.lint.Linter /** * Mapping models create qualified associations between two models. We enforce * naming such that <model1>_<model2>_mapping must have the fields: * * a. id * b. model1 of type model1_reference * c. model2 of type model2_reference */ case object MappingModels extends Linter with Helpers { override def validate(service: Service): Seq[String] = { service.models. filter { m => isMapping(m.name) }. flatMap(validateModel) } private[this] def validateModel(model: Model): Seq[String] = { model.fields.toList match { case f1 :: f2 :: f3 :: _ => { val typeErrors = validateTypes(f1, f2, f3) val nameErrors = typeErrors.toList match { case Nil => validateNames(f1, f2, f3) case _ => Nil } val modelNameErrors = (typeErrors ++ nameErrors).toList match { case Nil => validateModelName(s"${f2.name}_${f3.name}_mapping", model.name) case _ => Nil } typeErrors ++ nameErrors ++ modelNameErrors } case _ => { Seq( error(model, "Mapping models must have at least 3 fields") ) } } } private[this] def validateTypes(f1: Field, f2: Field, f3: Field): Seq[String] = { val f1Errors = if (f1.`type` == "string") { Nil } else { Seq(s"Field '${f1.name}' type must be 'string'") } val f2Errors = if (isReference(f2.`type`)) { Nil } else { Seq(s"Field '${f2.name}' type must be '${f2.name}_reference'") } val f3Errors = if (isReference(f3.`type`)) { Nil } else { Seq(s"Field '${f3.name}' type must be '${f3.name}_reference'") } f1Errors ++ f2Errors ++ f3Errors } private[this] def isReference(typ: String): Boolean = { typ.endsWith("_reference") } private[this] def validateNames(f1: Field, f2: Field, f3: Field): Seq[String] = { validateName(1, "id", f1.name) ++ validateName(2, stripReference(stripPackage(f2.`type`)), f2.name) ++ validateName(3, stripReference(stripPackage(f3.`type`)), f3.name) } private[this] def validateName(index: Int, expected: String, actual: String): Seq[String] = { if (expected == actual) { Nil } else { Seq(s"Field $index '$actual' must be named '$expected'") } } private[this] def stripReference(typ: String): String = { typ.stripSuffix("_reference") } private[this] def stripPackage(typ: String): String = { typ.split("\\.").last } private[this] def validateModelName(expected: String, actual: String): Seq[String] = { if (expected == actual) { Nil } else { Seq(s"Model '$actual' must be named '$expected'") } } }
flowcommerce/api-lint
src/main/scala/io/flow/lint/linters/MappingModels.scala
Scala
mit
2,832
package actors_and_concurrency

import akka.actor.{Actor, ActorSystem, PoisonPill, Props, Terminated}

object DeathWatchTest extends App {
  // create the ActorSystem instance
  val system = ActorSystem("DeathWatchTest")

  // create the Parent that will create Denny
  val parent = system.actorOf(Props[Parent], name = "Parent")

  parent ! "any message"

  Thread.sleep(3000)

  // look up Denny, then kill it
  val denny = system.actorSelection("/user/Parent/Denny")
  denny ! PoisonPill

  Thread.sleep(5000)
  println("calling system.terminate")
  system.terminate()
}

class Parent extends Actor {
  // start Denny as a child, then keep an eye on it
  val denny = context.actorOf(Props[Denny], name = "Denny")
  context.watch(denny)

  def receive = {
    case Terminated(denny) => println("OMG, they killed Denny")
    case _ => println("Parent received a message")
  }
}

class Denny extends Actor {
  def receive = {
    case _ => println("Denny received a message")
  }
}
rafaelkyrdan/cookbook
src/main/scala/actors_and_concurrency/DeathWatchTest.scala
Scala
mit
976
package org.opencommercesearch.cache /* * Licensed to OpenCommerceSearch under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. OpenCommerceSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import scala.concurrent.ExecutionContext.Implicits.global import org.specs2.mutable._ import org.specs2.mock.Mockito import java.util.concurrent.CountDownLatch import scala.concurrent.Future class DefaultMemoizerSpec extends Specification with Mockito { class DummyResult { def foo : String = { "any" } } "Memoizer" should { /** * This test case will potentially test the case where two threads enter the non-atomic * if statement. There no clean way to guarantee execution order though */ "should compute once even when two threads enter the non-atomic if statement" in { val result = mock[DummyResult] val endGate = new CountDownLatch(2) result.foo returns "one" val computable = new Computable[String, DummyResult] { def compute(arg: String) : DummyResult = { result.foo result } } val memoizer = new DefaultMemoizer[String, DummyResult](computable) var f1: Future[DummyResult] = null var f2: Future[DummyResult] = null val t1 = new Thread(new Runnable() { @Override def run() : Unit = { f1 = memoizer.compute("1") f1.map(s => { endGate.countDown() }) } }, "test1-thread-1") val t2 = new Thread(new Runnable() { @Override def run() : Unit = { f2 = memoizer.compute("1") f2.map(s => { endGate.countDown() }) } }, "test1-thread-2") t1.start() t2.start() endGate.await() f1.map({s => s must be(result) }) f2.map({s => s must be(result) }) f1 must be(f2) there was one(result).foo } } "should compute once when value is already cached" in { val result = mock[DummyResult] val startGate = new CountDownLatch(1) val endGate = new CountDownLatch(2) result.foo returns "one" val computable = new Computable[String, DummyResult] { def compute(arg: String) : DummyResult = { result.foo result } } val memoizer = new DefaultMemoizer[String, DummyResult](computable) var f1: Future[DummyResult] = null var f2: Future[DummyResult] = null val t1 = new Thread(new Runnable() { @Override def run() : Unit = { f1 = memoizer.compute("1") f1.map(s => { endGate.countDown() startGate.countDown() }) } }, "test2-thread-1") val t2 = new Thread(new Runnable() { @Override def run() : Unit = { startGate.await() f2 = memoizer.compute("1") f2.map(s => { endGate.countDown() }) } }, "test2-thread-2") t1.start() t2.start() endGate.await() f1.map({s => s must be(result) }) f2.map({s => s must be(result) }) f1 must be(f2) there was one(result).foo } "should not cache when exception occurs" in { val result = mock[DummyResult] val startGate = new CountDownLatch(1) val endGate = new CountDownLatch(2) result.foo returns "one" val computable = new Computable[String, DummyResult] { def compute(arg: String) : DummyResult = { if (result.foo.equals("one")) { throw new Exception("just fail") } result } } val memoizer = new DefaultMemoizer[String, 
DummyResult](computable) var f1: Future[DummyResult] = null var f2: Future[DummyResult] = null val t1 = new Thread(new Runnable() { @Override def run() : Unit = { f1 = memoizer.compute("1") f1.recover { case _ => { endGate.countDown() startGate.countDown() } } } }, "test3-thread-1") val t2 = new Thread(new Runnable() { @Override def run() : Unit = { startGate.await() result.foo returns "two" f2 = memoizer.compute("1") f2.map(s => { endGate.countDown() }) } }, "test3-thread-2") t1.start() t2.start() endGate.await() f1.recover { case ex: IllegalArgumentException => { ex.getMessage must be("just fail") } } f2.map({s => s must be(result) }) f1 must not be(f2) there was two(result).foo } }
madickson/opencommercesearch
opencommercesearch-api/test/org/opencommercesearch/cache/DefaultMemoizerSpec.scala
Scala
apache-2.0
5,183
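The spec above exercises Computable and DefaultMemoizer without showing a plain usage. The sketch below is an inferred illustration added for this edition: the package, the Computable shape and the Future-returning compute call are read off the spec itself rather than the library sources, and the slow-square computation is invented.

// Hedged sketch (not from the dataset): API shape inferred from the spec above.
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import org.opencommercesearch.cache.{Computable, DefaultMemoizer}

object MemoizerDemo {
  // Pretend this computation is expensive; the memoizer should run it once per key.
  val slowSquare = new Computable[Int, Long] {
    def compute(arg: Int): Long = {
      Thread.sleep(200)
      arg.toLong * arg
    }
  }

  def main(args: Array[String]): Unit = {
    val memoizer = new DefaultMemoizer[Int, Long](slowSquare)
    val first: Future[Long] = memoizer.compute(7)   // triggers the computation
    val second: Future[Long] = memoizer.compute(7)  // served from the cache
    println(Await.result(first, 5.seconds))  // 49
    println(Await.result(second, 5.seconds)) // 49, computed only once
  }
}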
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.commons.dbf; import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers import java.io.File import java.nio.charset.Charset class ReaderTest extends AnyFunSpec with Matchers { describe("Reader") { it("readinfo") { val fileName = "/home/chaostone/buf/XL_2013_10.dbf" val in = new File(fileName) if (in.exists) { println(Reader.readInfo(in)) val csv = new File(fileName.replace("dbf", "csv")) Reader.writeToCsv(in, csv, Charset.forName("GB18030")) } } } }
beangle/commons
dbf/src/test/scala/org/beangle/commons/dbf/ReaderTest.scala
Scala
lgpl-3.0
1,286
package graphique.backends.dummybackend import graphique.backends._ /** * A fully functional backend that stores the file content in memory. */ object DummyBackend { def apply(): Backend = { val io = new DummyIO val urls = new DummyUrlProvider(io, DummyPaths) val images = new ImageManager(io, DummyPaths) new Backend(images, urls) } }
amrhassan/graphique
src/test/scala/graphique/backends/dummybackend/DummyBackend.scala
Scala
mit
361
/* * Copyright 2014 Michael Krolikowski * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.mkroli.dns4s.netty import java.net.InetSocketAddress import com.github.mkroli.dns4s.Message import io.netty.channel.DefaultAddressedEnvelope class DnsPacket( msg: Message, dst: InetSocketAddress, src: InetSocketAddress) extends DefaultAddressedEnvelope[Message, InetSocketAddress]( msg, dst, src) object DnsPacket { def apply(msg: Message, dst: InetSocketAddress, src: InetSocketAddress) = new DnsPacket(msg, dst, src) def apply(msg: Message, dst: InetSocketAddress) = new DnsPacket(msg, dst, null) }
mesosphere/dns4s
netty/src/main/scala/com/github/mkroli/dns4s/netty/DnsPacket.scala
Scala
apache-2.0
1,153
/* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author James Iry */ // $Id$ package scala package tools.nsc.settings /** * Represents a single Scala version in a manner that * supports easy comparison and sorting. */ sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { def unparse: String } /** * A scala version that sorts higher than all actual versions */ case object NoScalaVersion extends ScalaVersion { def unparse = "none" def compare(that: ScalaVersion): Int = that match { case NoScalaVersion => 0 case _ => 1 } } /** * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion * may or may not be a released version - i.e. this same class is used to represent * final, release candidate, milestone, and development builds. The build argument is used * to segregate builds */ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { def unparse = s"${major}.${minor}.${rev}${build.unparse}" def compare(that: ScalaVersion): Int = that match { case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these // comparisons a lot so I'm using brute force direct style code if (major < thatMajor) -1 else if (major > thatMajor) 1 else if (minor < thatMinor) -1 else if (minor > thatMinor) 1 else if (rev < thatRev) -1 else if (rev > thatRev) 1 else build compare thatBuild case AnyScalaVersion => 1 case NoScalaVersion => -1 } } /** * A Scala version that sorts lower than all actual versions */ case object AnyScalaVersion extends ScalaVersion { def unparse = "any" def compare(that: ScalaVersion): Int = that match { case AnyScalaVersion => 0 case _ => -1 } } /** * Factory methods for producing ScalaVersions */ object ScalaVersion { private val dot = """\\.""" private val dash = "-" private val vchar = """\\d""" //"[^-+.]" private val vpat = s"(?s)($vchar+)(?:$dot($vchar+)(?:$dot($vchar+)(?:$dash(.*))?)?)?".r private val rcpat = """(?i)rc(\\d*)""".r private val mspat = """(?i)m(\\d*)""".r def apply(versionString: String, errorHandler: String => Unit): ScalaVersion = { def error() = errorHandler( s"Bad version (${versionString}) not major[.minor[.revision[-suffix]]]" ) def toInt(s: String) = s match { case null | "" => 0 case _ => s.toInt } def toBuild(s: String) = s match { case null | "FINAL" => Final case rcpat(i) => RC(toInt(i)) case mspat(i) => Milestone(toInt(i)) case _ /* | "" */ => Development(s) } versionString match { case "none" => NoScalaVersion case "" => NoScalaVersion case "any" => AnyScalaVersion case vpat(majorS, minorS, revS, buildS) => SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS)) case _ => error() ; AnyScalaVersion } } def apply(versionString: String): ScalaVersion = apply(versionString, msg => throw new NumberFormatException(msg)) /** * The version of the compiler running now */ val current = apply(util.Properties.versionNumberString) /** * The 2.8.0 version. 
*/ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final) } /** * Represents the data after the dash in major.minor.rev-build */ abstract class ScalaBuild extends Ordered[ScalaBuild] { /** * Return a version of this build information that can be parsed back into the * same ScalaBuild */ def unparse: String } /** * A development, test, nightly, snapshot or other "unofficial" build */ case class Development(id: String) extends ScalaBuild { def unparse = s"-${id}" def compare(that: ScalaBuild) = that match { // sorting two development builds based on id is reasonably valid for two versions created with the same schema // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions // this is a pragmatic compromise case Development(thatId) => id compare thatId // assume a development build is newer than anything else, that's not really true, but good luck // mapping development build versions to other build types case _ => 1 } } /** * A final final */ case object Final extends ScalaBuild { def unparse = "" def compare(that: ScalaBuild) = that match { case Final => 0 // a final is newer than anything other than a development build or another final case Development(_) => -1 case _ => 1 } } /** * A candidate for final release */ case class RC(n: Int) extends ScalaBuild { def unparse = s"-RC${n}" def compare(that: ScalaBuild) = that match { // compare two rcs based on their RC numbers case RC(thatN) => n - thatN // an rc is older than anything other than a milestone or another rc case Milestone(_) => 1 case _ => -1 } } /** * An intermediate release */ case class Milestone(n: Int) extends ScalaBuild { def unparse = s"-M${n}" def compare(that: ScalaBuild) = that match { // compare two milestones based on their milestone numbers case Milestone(thatN) => n - thatN // a milestone is older than anything other than another milestone case _ => -1 } }
felixmulder/scala
src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
Scala
bsd-3-clause
5,444
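The ScalaVersion record above defines parsing and ordering but contains no usage. The sketch below, added for this edition, exercises only members defined in that record; the expected results in the comments follow directly from the compare implementations shown there, and the demo object is invented.

// Illustrative sketch (not part of the dataset): parsing and ordering behaviour
// implied by the ScalaVersion record above.
import scala.tools.nsc.settings._

object ScalaVersionDemo {
  def main(args: Array[String]): Unit = {
    val release   = ScalaVersion("2.11.8")
    val candidate = ScalaVersion("2.12.0-RC1")
    val milestone = ScalaVersion("2.12.0-M5")

    println(release < candidate)                 // true: 2.11.x sorts below 2.12.x
    println(milestone < candidate)               // true: milestones sort below release candidates
    println(candidate < ScalaVersion("2.12.0"))  // true: RCs sort below the final release
    println(ScalaVersion("2.12.0").unparse)              // "2.12.0"
    println(SpecificScalaVersion(2, 12, 0, RC(1)).unparse) // "2.12.0-RC1"

    // "any" and "none" are the magic bounds: AnyScalaVersion sorts below and
    // NoScalaVersion above every concrete version.
    println(AnyScalaVersion < release && release < NoScalaVersion) // true
  }
}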
/** * Copyright 2011-2017 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.charts.template import io.gatling.charts.component.Component import io.gatling.charts.config.ChartsFiles.GlobalPageName private[charts] class GlobalPageTemplate(components: Component*) extends PageTemplate(GlobalPageName, false, None, None, components: _*)
MykolaB/gatling
gatling-charts/src/main/scala/io/gatling/charts/template/GlobalPageTemplate.scala
Scala
apache-2.0
898
package com.sksamuel.elastic4s import scala.language.implicitConversions /** @author Stephen Samuel */ case class IndexType(index: String, `type`: String) case class IndexesTypes(indexes: Seq[String], types: Seq[String]) { def index = indexes.headOption.getOrElse(throw new RuntimeException("Specify at least one index")) def typ = types.headOption } object IndexesTypes { def apply(indexes: Iterable[String]): IndexesTypes = indexes.size match { case 0 => throw new RuntimeException("Could not parse into index/type") case 1 => apply(indexes.head) case _ => apply(indexes.toSeq, Nil) } def apply(tuple: (String, String)): IndexesTypes = apply(tuple._1, tuple._2) def apply(index: String, `type`: String): IndexesTypes = IndexesTypes(List(index), List(`type`)) def apply(indexType: IndexType): IndexesTypes = IndexesTypes(indexType.index, indexType.`type`) def apply(string: String): IndexesTypes = { string.split("/") match { case Array(index) => IndexesTypes(Array(index), Nil) case Array(index, t) => IndexesTypes(List(index), List(t)) case _ => throw new RuntimeException("Could not parse into index/type") } } } trait IndexesTypesDsl { implicit def string2indexestypes(string: String): IndexesTypes = IndexesTypes(string) implicit def tuple2indexestypes(tuple: (String, String)): IndexesTypes = IndexesTypes(tuple) }
l15k4/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/IndexType.scala
Scala
apache-2.0
1,384
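The IndexType record above defines string and tuple conversions for IndexesTypes. The sketch below, added for this edition, shows those conversions; everything it calls appears in the record, and only the demo object and the twitter/tweets sample values are invented.

// Usage sketch (not part of the dataset): string and tuple conversions from the record above.
import com.sksamuel.elastic4s.{IndexesTypes, IndexesTypesDsl}

object IndexesTypesDemo extends IndexesTypesDsl {
  def main(args: Array[String]): Unit = {
    val fromString: IndexesTypes = IndexesTypes("twitter/tweets")
    println(fromString.index) // "twitter"
    println(fromString.typ)   // Some("tweets")

    val indexOnly = IndexesTypes("twitter")
    println(indexOnly.typ)    // None -- no type segment in the string

    // The DSL's implicits let plain strings and tuples stand in for IndexesTypes.
    val fromTuple: IndexesTypes = ("twitter", "tweets")
    println(fromTuple.indexes) // List(twitter)
  }
}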
/* * Wire * Copyright (C) 2016 Wire Swiss GmbH * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.waz.sync.handler import com.waz.log.BasicLogging.LogTag.DerivedLogTag import com.waz.log.LogSE._ import com.waz.model.AddressBook import com.waz.service._ import com.waz.service.tracking.TrackingService import com.waz.sync.SyncResult import com.waz.sync.client.AddressBookClient import com.waz.threading.Threading import scala.concurrent.Future class AddressBookSyncHandler(contacts: ContactsServiceImpl, client: AddressBookClient, tracking: TrackingService) extends DerivedLogTag { import Threading.Implicits.Background def postAddressBook(ab: AddressBook): Future[SyncResult] = { verbose(l"postAddressBook()") if (ab == AddressBook.Empty) Future.successful(SyncResult.Success) else { // TODO: post incremental changes only - once backend supports that for { postRes <- client.postAddressBook(ab).future result <- postRes match { case Left(error) => Future.successful(SyncResult(error)) case Right(users) => contacts.onAddressBookUploaded(ab, users) .map(_ => SyncResult.Success) .recover { case e: Throwable => SyncResult(e) } } } yield result } } }
wireapp/wire-android-sync-engine
zmessaging/src/main/scala/com/waz/sync/handler/AddressBookSyncHandler.scala
Scala
gpl-3.0
2,011
package dotty.tools package dotc package typer import backend.sjs.JSDefinitions import core._ import Contexts._, Types._, Symbols._, Names._, Decorators._, ProtoTypes._ import Flags._, SymDenotations._ import NameKinds.FlatName import StdNames._ import config.Printers.{implicits, implicitsDetailed} import ast.{untpd, tpd} import Implicits.{hasExtMethod, Candidate} import java.util.{Timer, TimerTask} import collection.mutable import scala.util.control.NonFatal /** This trait defines the method `importSuggestionAddendum` that adds an addendum * to error messages suggesting additional imports. */ trait ImportSuggestions: this: Typer => /** The maximal number of suggested imports to make */ inline val MaxSuggestions = 10 import tpd._ /** Timeout to test a single implicit value as a suggestion, in ms */ private inline val testOneImplicitTimeOut = 500 /** A list of TermRefs referring to the roots where suggestions for * imports of givens or extension methods that might fix a type error * are searched. * * These roots are the smallest set of objects and packages that includes * * - any object that is a defined in an enclosing scope, * - any object that is a member of an enclosing class, * - any enclosing package (including the root package), * - any object that is a member of a searched object or package, * - any object or package from which something is imported in an enclosing scope, * - any package that is nested in a searched package, provided * the package was accessed in some way previously. * * Excluded from the root set are: * * - Objects that contain `$`s in their name. These have to * be omitted since they might be inner Java class files which * cannot be read by the ClassfileParser without crashing. * - Any members of static parts of Java classes. * - Any members of the empty package. These should be * skipped since the empty package often contains unrelated junk files * that should not be used for suggestions. * - Any members of the java or java.lang packages. These are * skipped as an optimization, since they won't contain implicits anyway. */ private def suggestionRoots(using Context) = val seen = mutable.Set[TermRef]() def lookInside(root: Symbol)(using Context): Boolean = explore { if root.is(Package) then root.isTerm && root.isCompleted else !root.name.is(FlatName) && !root.name.lastPart.contains('$') && root.is(ModuleVal, butNot = JavaDefined) // The implicits in `scalajs.js.|` are implementation details and shouldn't be suggested && !(root.name == nme.raw.BAR && ctx.settings.scalajs.value && root == JSDefinitions.jsdefn.PseudoUnionModule) } def nestedRoots(site: Type)(using Context): List[Symbol] = val seenNames = mutable.Set[Name]() site.baseClasses.flatMap { bc => bc.info.decls.filter { dcl => lookInside(dcl) && !seenNames.contains(dcl.name) && { seenNames += dcl.name; true } } } def rootsStrictlyIn(ref: Type)(using Context): List[TermRef] = val site = ref.widen val refSym = site.typeSymbol val nested = if refSym.is(Package) then if refSym == defn.EmptyPackageClass // Don't search the empty package || refSym == defn.JavaPackageClass // As an optimization, don't search java... || refSym == defn.JavaLangPackageClass // ... or java.lang. 
then Nil else refSym.info.decls.filter(lookInside) else if refSym.infoOrCompleter.isInstanceOf[StubInfo] then Nil // Don't chase roots that do not exist else if !refSym.is(Touched) then refSym.ensureCompleted() // JavaDefined is reliably known only after completion if refSym.is(JavaDefined) then Nil else nestedRoots(site) nested .map(mbr => TermRef(ref, mbr.asTerm)) .flatMap(rootsIn) .toList def rootsIn(ref: TermRef)(using Context): List[TermRef] = if seen.contains(ref) then Nil else implicitsDetailed.println(i"search for suggestions in ${ref.symbol.fullName}") seen += ref ref :: rootsStrictlyIn(ref) def rootsOnPath(tp: Type)(using Context): List[TermRef] = tp match case ref: TermRef => rootsIn(ref) ::: rootsOnPath(ref.prefix) case _ => Nil def recur(using Context): List[TermRef] = if ctx.owner.exists then val defined = if ctx.owner.isClass then if ctx.owner eq ctx.outer.owner then Nil else rootsStrictlyIn(ctx.owner.thisType) else if ctx.scope eq ctx.outer.scope then Nil else ctx.scope .filter(lookInside(_)) .flatMap(sym => rootsIn(sym.termRef)) val imported = if ctx.importInfo eqn ctx.outer.importInfo then Nil else ctx.importInfo.nn.importSym.info match case ImportType(expr) => rootsOnPath(expr.tpe) case _ => Nil defined ++ imported ++ recur(using ctx.outer) else Nil recur end suggestionRoots /** Given an expected type `pt`, return two lists of TermRefs: * * 1. The _fully matching_ given instances that can be completed * to a full synthesized given term that matches the expected type `pt`. * * 2. The _head matching_ given instances, that conform to the * expected type `pt`, ignoring any dependent implicit arguments. * * If there are no fully matching given instances under (1), and `pt` is * a view prototype of a selection of the form `T ?=>? { name: ... }`, * return instead a list of all possible references to extension methods named * `name` that are applicable to `T`. */ private def importSuggestions(pt: Type)(using Context): (List[TermRef], List[TermRef]) = val timer = new Timer() val allotted = ctx.run.nn.importSuggestionBudget if allotted <= 1 then return (Nil, Nil) implicits.println(i"looking for import suggestions, timeout = ${allotted}ms") val start = System.currentTimeMillis() val deadLine = start + allotted // Candidates that are already available without explicit import because they // are already provided by the context (imported or inherited) or because they // are in the implicit scope of `pt`. val alreadyAvailableCandidates: Set[Symbol] = { val wildProto = wildApprox(pt) val contextualCandidates = ctx.implicits.eligible(wildProto) val implicitScopeCandidates = ctx.run.nn.implicitScope(wildProto).eligible val allCandidates = contextualCandidates ++ implicitScopeCandidates allCandidates.map(_.implicitRef.underlyingRef.symbol).toSet } def testContext(): Context = ctx.fresh.retractMode(Mode.ImplicitsEnabled).setExploreTyperState() /** Test whether the head of a given instance matches the expected type `pt`, * ignoring any dependent implicit arguments. */ def shallowTest(ref: TermRef): Boolean = System.currentTimeMillis < deadLine && inContext(testContext()) { def test(pt: Type): Boolean = pt match case ViewProto(argType, OrType(rt1, rt2)) => // Union types do not constrain results, since comparison with a union // type on the right might lose information. See ProtoTypes.disregardProto. // To regain precision, test both sides separately. 
test(ViewProto(argType, rt1)) || test(ViewProto(argType, rt2)) case pt: ViewProto => pt.isMatchedBy(ref) case _ => normalize(ref, pt) <:< pt test(pt) } /** Test whether a full given term can be synthesized that matches * the expected type `pt`. */ def deepTest(ref: TermRef): Boolean = System.currentTimeMillis < deadLine && { val task = new TimerTask: def run() = println(i"Cancelling test of $ref when making suggestions for error in ${ctx.source}") ctx.run.nn.isCancelled = true val span = ctx.owner.srcPos.span val (expectedType, argument, kind) = pt match case ViewProto(argType, resType) => (resType, untpd.Ident(ref.name).withSpan(span).withType(argType), if hasExtMethod(ref, resType) then Candidate.Extension else Candidate.Conversion) case _ => (pt, EmptyTree, Candidate.Value) val candidate = Candidate(ref, kind, 0) try timer.schedule(task, testOneImplicitTimeOut) typedImplicit(candidate, expectedType, argument, span)( using testContext()).isSuccess finally val run = ctx.run.nn if task.cancel() then // timer task has not run yet assert(!run.isCancelled) else while !run.isCancelled do () // wait until timer task has run to completion run.isCancelled = false } end deepTest /** Optionally, an extension method reference `site.name` that is * applicable to `argType`. */ def extensionMethod(site: TermRef, name: TermName, argType: Type): Option[TermRef] = site.member(name) .alternatives .map(mbr => TermRef(site, mbr.symbol)) .filter(ref => ctx.typer.isApplicableExtensionMethod(ref, argType)) .headOption try val roots = suggestionRoots .filterNot(root => defn.rootImportTypes.exists(_.symbol == root.symbol)) // don't suggest things that are imported by default def extensionImports = pt match case ViewProto(argType, SelectionProto(name: TermName, _, _, _)) => roots.flatMap(extensionMethod(_, name, argType)) case _ => Nil roots .flatMap(_.implicitMembers.filter { ref => !alreadyAvailableCandidates(ref.symbol) && shallowTest(ref) }) // filter whether the head of the implicit can match .partition(deepTest) // partition into full matches and head matches match case (Nil, partials) => (extensionImports, partials) case givenImports => givenImports catch case NonFatal(ex) => if ctx.settings.Ydebug.value then println("caught exception when searching for suggestions") ex.printStackTrace() (Nil, Nil) finally timer.cancel() reduceTimeBudget(((System.currentTimeMillis() - start) min Int.MaxValue).toInt) end importSuggestions /** Reduce next timeout for import suggestions by the amount of time it took * for current search, but but never less than to half of the previous budget. */ private def reduceTimeBudget(used: Int)(using Context) = val run = ctx.run.nn run.importSuggestionBudget = (run.importSuggestionBudget - used) max (run.importSuggestionBudget / 2) /** The `ref` parts of this list of pairs, discarding subsequent elements that * have the same String part. Elements are sorted by their String parts. */ extension (refs: List[(TermRef, String)]) def distinctRefs(using Context): List[TermRef] = val buf = new mutable.ListBuffer[TermRef] var last = "" for (ref, str) <- refs do if last != str then buf += ref last = str buf.toList /** The best `n` references in `refs`, according to `compare` * `compare` is a partial order. If there's a tie, we take elements * in the order thy appear in the list. 
*/ extension (refs: List[TermRef]) def best(n: Int)(using Context): List[TermRef] = val top = new Array[TermRef](n) var filled = 0 val rest = new mutable.ListBuffer[TermRef] val noImplicitsCtx = ctx.retractMode(Mode.ImplicitsEnabled) for ref <- refs do var i = 0 var diff = 0 while i < filled && diff == 0 do diff = compare(ref, top(i))(using noImplicitsCtx) if diff > 0 then rest += top(i) top(i) = ref i += 1 end while if diff == 0 && filled < n then top(filled) = ref filled += 1 else if diff <= 0 then rest += ref end for val remaining = if filled < n && rest.nonEmpty then rest.toList.best(n - filled) else Nil top.take(filled).toList ++ remaining //end best TODO: re-enable with new syntax /** An addendum to an error message where the error might be fixed * by some implicit value of type `pt` that is however not found. * The addendum suggests given imports that might fix the problem. * If there's nothing to suggest, an empty string is returned. */ override def importSuggestionAddendum(pt: Type)(using Context): String = val (fullMatches, headMatches) = importSuggestions(pt)(using ctx.fresh.setExploreTyperState()) implicits.println(i"suggestions for $pt in ${ctx.owner} = ($fullMatches%, %, $headMatches%, %)") val (suggestedRefs, help) = if fullMatches.nonEmpty then (fullMatches, "fix") else (headMatches, "make progress towards fixing") def importString(ref: TermRef): String = val imported = if ref.symbol.is(ExtensionMethod) then s"${ctx.printer.toTextPrefix(ref.prefix).show}${ref.symbol.name}" else ctx.printer.toTextRef(ref).show s" import $imported" val suggestions = suggestedRefs .zip(suggestedRefs.map(importString)) .filter((ref, str) => str.contains('.')) // must be a real import with `.` .sortBy(_._2) // sort first alphabetically for stability .distinctRefs // TermRefs might be different but generate the same strings .best(MaxSuggestions) // take MaxSuggestions best references according to specificity .map(importString) if suggestions.isEmpty then "" else val fix = if suggestions.tail.isEmpty then "The following import" else "One of the following imports" i""" | |$fix might $help the problem: | |$suggestions%\n% | |""" end importSuggestionAddendum end ImportSuggestions
dotty-staging/dotty
compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
Scala
apache-2.0
14,269
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.spark.testsuite.datacompaction import scala.collection.JavaConverters._ import org.apache.spark.sql.Row import org.scalatest.BeforeAndAfterAll import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonTableIdentifier} import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.statusmanager.SegmentStatusManager import org.apache.carbondata.core.util.CarbonProperties import org.apache.spark.sql.test.util.QueryTest /** * FT for data compaction scenario. */ class DataCompactionCardinalityBoundryTest extends QueryTest with BeforeAndAfterAll { override def beforeAll { CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true") sql("drop table if exists cardinalityTest") CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/yyyy") sql( "CREATE TABLE IF NOT EXISTS cardinalityTest (country String, ID String, date Timestamp, name " + "String, " + "phonetype String, serialname String, salary Int) STORED BY 'org.apache.carbondata" + ".format'" ) val csvFilePath1 = s"$resourcesPath/compaction/compaction1.csv" // loading the rows greater than 256. so that the column cardinality crosses byte boundary. val csvFilePath2 = s"$resourcesPath/compaction/compactioncard2.csv" val csvFilePath3 = s"$resourcesPath/compaction/compaction3.csv" sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE cardinalityTest OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE cardinalityTest OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) // compaction will happen here. sql("LOAD DATA LOCAL INPATH '" + csvFilePath3 + "' INTO TABLE cardinalityTest OPTIONS" + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')" ) // compaction will happen here. sql("alter table cardinalityTest compact 'major'" ) } test("check if compaction is completed or not and verify select query.") { var status = true var noOfRetries = 0 while (status && noOfRetries < 10) { val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new AbsoluteTableIdentifier( CarbonProperties.getInstance.getProperty(CarbonCommonConstants.STORE_LOCATION), new CarbonTableIdentifier("default", "cardinalityTest", "1") ) ) val segments = segmentStatusManager.getValidAndInvalidSegments.getValidSegments.asScala.toList if (!segments.contains("0.1")) { // wait for 2 seconds for compaction to complete. Thread.sleep(500) noOfRetries += 1 } else { status = false } } // now check the answers it should be same. 
checkAnswer( sql("select country,count(*) from cardinalityTest group by country"), Seq(Row("america",1), Row("canada",1), Row("chile",1), Row("china",2), Row("england",1), Row("burma",152), Row("butan",101), Row("mexico",1), Row("newzealand",1), Row("westindies",1), Row("india",1), Row("iran",1), Row("iraq",1), Row("ireland",1) ) ) } override def afterAll { sql("drop table if exists cardinalityTest") CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy") CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "false") } }
HuaweiBigData/carbondata
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
Scala
apache-2.0
4,465
package com.github.jeanadrien.gatling.mqtt.protocol import akka.actor.{ActorRef, ActorSystem} import com.github.jeanadrien.gatling.mqtt.client.{FuseSourceMqttClient, MqttClient} import com.typesafe.scalalogging.StrictLogging import io.gatling.commons.validation.Validation import io.gatling.core.protocol.ProtocolComponents import io.gatling.core.session._ import org.fusesource.mqtt.client.CallbackConnection /** * */ case class MqttComponents( mqttProtocol : MqttProtocol, system : ActorSystem ) extends ProtocolComponents with StrictLogging { def mqttEngine( session : Session, connectionSettings : ConnectionSettings, gatlingMqttId : String ) : Validation[ActorRef] = { logger.debug(s"MqttComponents: new mqttEngine: ${gatlingMqttId}") mqttProtocol.configureMqtt(session).map { config => // inject the selected engine val mqttClient = system.actorOf(MqttClient.clientInjection(config, gatlingMqttId)) mqttClient } } override def onStart : Option[(Session) => Session] = Some(s => { logger.debug("MqttComponents: onStart"); s }) override def onExit : Option[(Session) => Unit] = Some(s => { logger.debug("MqttComponents: onExit"); s("engine").asOption[ActorRef].foreach { mqtt => system.stop(mqtt) } }) }
jeanadrien/gatling-mqtt-protocol
src/main/scala/com/github/jeanadrien/gatling/mqtt/protocol/MqttComponents.scala
Scala
apache-2.0
1,369
package io.circe.generic.simple.decoding import cats.Apply import io.circe.{ Decoder, DecodingFailure, HCursor } import shapeless.{ :+:, ::, CNil, Coproduct, HList, HNil, Inl, Inr, Witness } import shapeless.labelled.{ FieldType, field } /** * A decoder for a generic representation of a case class or ADT. * * Note that users typically will not work with instances of this class. */ abstract class ReprDecoder[A] extends Decoder[A] object ReprDecoder { private[this] def consResults[F[_], K, V, T <: HList](hv: F[V], tr: F[T])( implicit F: Apply[F] ): F[FieldType[K, V] :: T] = F.map2(hv, tr)((v, t) => field[K].apply[V](v) :: t) implicit val decodeHNil: ReprDecoder[HNil] = new ReprDecoder[HNil] { def apply(c: HCursor): Decoder.Result[HNil] = Right(HNil) } implicit def decodeHCons[K <: Symbol, H, T <: HList]( implicit key: Witness.Aux[K], decodeH: Decoder[H], decodeT: ReprDecoder[T] ): ReprDecoder[FieldType[K, H] :: T] = new ReprDecoder[FieldType[K, H] :: T] { def apply(c: HCursor): Decoder.Result[FieldType[K, H] :: T] = for { h <- c.get(key.value.name)(decodeH) t <- decodeT(c) } yield field[K](h) :: t override def decodeAccumulating(c: HCursor): Decoder.AccumulatingResult[FieldType[K, H] :: T] = consResults[Decoder.AccumulatingResult, K, H, T]( decodeH.tryDecodeAccumulating(c.downField(key.value.name)), decodeT.decodeAccumulating(c) ) } implicit val decodeCNil: ReprDecoder[CNil] = new ReprDecoder[CNil] { def apply(c: HCursor): Decoder.Result[CNil] = Left(DecodingFailure("CNil", c.history)) } implicit def decodeCoproduct[K <: Symbol, L, R <: Coproduct]( implicit key: Witness.Aux[K], decodeL: Decoder[L], decodeR: => ReprDecoder[R] ): ReprDecoder[FieldType[K, L] :+: R] = new ReprDecoder[FieldType[K, L] :+: R] { private[this] lazy val cachedDecodeR: Decoder[R] = decodeR def apply(c: HCursor): Decoder.Result[FieldType[K, L] :+: R] = c.downField(key.value.name).focus match { case Some(value) => value.as(decodeL).map(l => Inl(field(l))) case None => cachedDecodeR(c).map(Inr(_)) } override def decodeAccumulating(c: HCursor): Decoder.AccumulatingResult[FieldType[K, L] :+: R] = { val f = c.downField(key.value.name) f.focus match { case Some(value) => decodeL.tryDecodeAccumulating(f).map(l => Inl(field(l))) case None => cachedDecodeR.decodeAccumulating(c).map(Inr(_)) } } } }
travisbrown/circe
modules/generic-simple/src/main/scala/io/circe/generic/simple/decoding/ReprDecoder.scala
Scala
apache-2.0
2,530
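An end-user sketch of the derivation that ReprDecoder ultimately powers. It assumes circe-core, circe-parser and the regular circe-generic module on the classpath (deriveDecoder is not part of this internal class), and the User case class is invented for illustration.

import io.circe.Decoder
import io.circe.generic.semiauto.deriveDecoder
import io.circe.parser.decode

object ReprDecoderSketch extends App {
  case class User(name: String, age: Int)
  implicit val userDecoder: Decoder[User] = deriveDecoder[User]

  // Each field is looked up by its key, mirroring decodeHCons reading key.value.name.
  println(decode[User]("""{ "name": "Ada", "age": 36 }"""))
  // A missing field surfaces as a Left(DecodingFailure(...)), as in the cursor navigation above.
  println(decode[User]("""{ "name": "Ada" }"""))
}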
package com.omega.service import java.util.{ List => JList } import org.springframework.transaction.annotation.Transactional import com.omega.domain.Book import com.omega.repository.BookDao import com.omega.util.BeanLifeCycle @Transactional class BookServiceImpl(val bookDao: BookDao) extends BookService with BeanLifeCycle { @Transactional override def save(book: Book): Book = { bookDao.save(book) } @Transactional override def getBooks: JList[Book] = { bookDao.getBooks } }
milind-chawla/Omega
src/main/scala/com/omega/service/BookServiceImpl.scala
Scala
mit
531
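A hypothetical Spring Java-config wiring for the service above, assuming a BookDao bean is declared elsewhere in the application context; the real Omega project may wire its beans differently (XML or component scanning).

import com.omega.repository.BookDao
import com.omega.service.{BookService, BookServiceImpl}
import org.springframework.context.annotation.{Bean, Configuration}

@Configuration
class OmegaServiceConfig {
  // Spring injects the BookDao bean by type into this factory method; constructor
  // injection keeps BookServiceImpl trivially testable outside the container.
  @Bean
  def bookService(bookDao: BookDao): BookService = new BookServiceImpl(bookDao)
}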
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.github.carlomicieli.scalakoans

import org.scalatest._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class AboutAsserts extends FunSuite with ShouldMatchers {

  test("asserts can take a boolean argument") {
    assert(true)
  }

  test("asserts can include a message") {
    assert(true, "This should be true")
  }

  test("true and false values can be compared with should matchers") {
    true should be(true)
  }

  test("booleans in asserts can test equality") {
    val v1 = 4
    val v2 = 4
    // === comes from ScalaTest, not from the Scala language; its result only becomes
    // an actual check (with a descriptive failure message) when passed to assert.
    assert(v1 === v2)
  }

  test("sometimes we expect you to fill in the values") {
    assert(2 == 1 + 1)
  }
}
CarloMicieli/first-steps-with-scala
src/test/scala/io/github/carlomicieli/scalakoans/AboutAsserts.scala
Scala
apache-2.0
1,357
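A small companion test in the same old-style ScalaTest FunSuite as the koan above, illustrating the point behind the `===` test: the triple-equals expression only performs a check once its result is handed to assert.

import org.scalatest.FunSuite

class AboutAssertsCompanion extends FunSuite {
  test("=== only checks something when its result is handed to assert") {
    val left = 2
    val right = 1 + 1
    // Wrapping in assert is what turns === into a real check; when the sides differ
    // the failure message reports both values instead of a bare "assertion failed".
    assert(left === right)
  }
}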
package pureconfig.generic import scala.reflect.macros.blackbox import pureconfig._ /** Macros used to circumvent divergence checker restrictions in the compiler. */ class ExportMacros(val c: blackbox.Context) { import c.universe._ final def exportDerivedReader[A](implicit a: c.WeakTypeTag[A]): c.Expr[Exported[ConfigReader[A]]] = { c.typecheck(q"_root_.shapeless.lazily[_root_.pureconfig.generic.DerivedConfigReader[$a]]", silent = true) match { case EmptyTree => c.abort(c.enclosingPosition, s"Unable to infer value of type $a") case t => c.Expr[Exported[ConfigReader[A]]](q"new _root_.pureconfig.Exported($t: _root_.pureconfig.ConfigReader[$a])") } } final def exportDerivedWriter[A](implicit a: c.WeakTypeTag[A]): c.Expr[Exported[ConfigWriter[A]]] = { c.typecheck(q"_root_.shapeless.lazily[_root_.pureconfig.generic.DerivedConfigWriter[$a]]", silent = true) match { case EmptyTree => c.abort(c.enclosingPosition, s"Unable to infer value of type $a") case t => c.Expr[Exported[ConfigWriter[A]]](q"new _root_.pureconfig.Exported($t: _root_.pureconfig.ConfigWriter[$a])") } } }
pureconfig/pureconfig
modules/generic/src/main/scala/pureconfig/generic/ExportMacros.scala
Scala
mpl-2.0
1,153
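A usage sketch of what these export macros enable, assuming pureconfig 0.12+ with the generic module on the classpath; AppConfig and the HOCON snippet are invented for illustration.

import com.typesafe.config.ConfigFactory
import pureconfig._
import pureconfig.generic.auto._

object ExportedReaderSketch extends App {
  case class AppConfig(host: String, port: Int)

  // No explicit ConfigReader[AppConfig] is written here: the auto._ import lets the
  // compiler fall back to the exported derived instance produced by these macros.
  val loaded = ConfigSource
    .fromConfig(ConfigFactory.parseString("{ host = localhost, port = 8080 }"))
    .load[AppConfig]
  println(loaded) // Right(AppConfig(localhost,8080))
}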
package com.softwaremill.streams

import java.io.File

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.io.Framing
import akka.stream.scaladsl.{FileIO, Keep}
import akka.util.ByteString
import com.softwaremill.streams.util.TestFiles
import com.softwaremill.streams.util.Timed._

import scala.concurrent.{Await, Future}
import scalaz.stream.{io, text}
import scala.concurrent.duration._

trait TransferTransformFile {
  /**
   * @return Number of bytes written
   */
  def run(from: File, to: File): Long
}

object AkkaStreamsTransferTransformFile extends TransferTransformFile {
  private lazy implicit val system = ActorSystem()

  override def run(from: File, to: File) = {
    implicit val mat = ActorMaterializer()

    val r: Future[Long] = FileIO.fromFile(from)
      .via(Framing.delimiter(ByteString("\n"), 1048576))
      .map(_.utf8String)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .map(ByteString(_))
      .toMat(FileIO.toFile(to))(Keep.right)
      .run()

    Await.result(r, 1.hour)
  }

  def shutdown() = {
    system.terminate()
  }
}

object ScalazStreamsTransferTransformFile extends TransferTransformFile {
  override def run(from: File, to: File) = {
    io.linesR(from.getAbsolutePath)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .pipe(text.utf8Encode)
      .to(io.fileChunkW(to.getAbsolutePath))
      .run
      .run

    to.length()
  }
}

object TransferTransformFileRunner extends App {
  def runTransfer(ttf: TransferTransformFile, sizeMB: Int): String = {
    val output = File.createTempFile("fft", "txt")
    try {
      ttf.run(TestFiles.testFile(sizeMB), output).toString
    } finally output.delete()
  }

  val tests = List(
    (ScalazStreamsTransferTransformFile, 10),
    (ScalazStreamsTransferTransformFile, 100),
    (ScalazStreamsTransferTransformFile, 500),
    (AkkaStreamsTransferTransformFile, 10),
    (AkkaStreamsTransferTransformFile, 100),
    (AkkaStreamsTransferTransformFile, 500)
  )

  runTests(tests.map { case (ttf, sizeMB) =>
    (s"${if (ttf == ScalazStreamsTransferTransformFile) "scalaz" else "akka"}, $sizeMB MB",
      () => runTransfer(ttf, sizeMB))
  }, 3)

  AkkaStreamsTransferTransformFile.shutdown()
}
softwaremill/streams-tests
src/main/scala/com/softwaremill/streams/TransferTransformFile.scala
Scala
apache-2.0
2,318
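An in-memory variant of the Akka Streams pipeline above, assuming a newer Akka Streams release where Framing lives in akka.stream.scaladsl (the file-based code above targets the older akka.stream.io location); the sample input is invented.

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Sink, Source}
import akka.util.ByteString

import scala.concurrent.Await
import scala.concurrent.duration._

object InMemoryTransformSketch extends App {
  implicit val system = ActorSystem("transform-sketch")
  implicit val materializer = ActorMaterializer()

  val input = ByteString("keep this line\ndrop this #!@ line\nstars *** here\n")
  val transformed = Source.single(input)
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1048576, allowTruncation = true))
    .map(_.utf8String)
    .filter(!_.contains("#!@"))      // same filter as the file-based pipeline
    .map(_.replace("*", "0"))        // same replacement as the file-based pipeline
    .runWith(Sink.seq)

  println(Await.result(transformed, 5.seconds)) // Vector(keep this line, stars 000 here)
  system.terminate()
}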
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gearpump.streaming.examples.wordcount import scala.concurrent.Future import scala.util.Success import org.scalatest.prop.PropertyChecks import org.scalatest.{BeforeAndAfter, Matchers, PropSpec} import org.apache.gearpump.cluster.ClientToMaster.SubmitApplication import org.apache.gearpump.cluster.MasterToClient.SubmitApplicationResult import org.apache.gearpump.cluster.{MasterHarness, TestUtil} class WordCountSpec extends PropSpec with PropertyChecks with Matchers with BeforeAndAfter with MasterHarness { before { startActorSystem() } after { shutdownActorSystem() } protected override def config = TestUtil.DEFAULT_CONFIG property("WordCount should succeed to submit application with required arguments") { val requiredArgs = Array.empty[String] val optionalArgs = Array( "-split", "1", "-sum", "1") val args = { Table( ("requiredArgs", "optionalArgs"), (requiredArgs, optionalArgs) ) } val masterReceiver = createMockMaster() forAll(args) { (requiredArgs: Array[String], optionalArgs: Array[String]) => val args = requiredArgs ++ optionalArgs Future { WordCount.main(masterConfig, args) } masterReceiver.expectMsgType[SubmitApplication](PROCESS_BOOT_TIME) masterReceiver.reply(SubmitApplicationResult(Success(0))) } } }
manuzhang/incubator-gearpump
examples/streaming/wordcount/src/test/scala/org/apache/gearpump/streaming/examples/wordcount/WordCountSpec.scala
Scala
apache-2.0
2,195
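A framework-free sketch of the same table-driven testing pattern (ScalaTest PropSpec with PropertyChecks) used above, without the Gearpump master harness; the assertion is illustrative only.

import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}

class ArgumentTableSpec extends PropSpec with PropertyChecks with Matchers {
  property("each table row drives one independent check") {
    val args = Table(
      ("requiredArgs", "optionalArgs"),
      (Array.empty[String], Array("-split", "1", "-sum", "1"))
    )
    // forAll runs the body once per row, exactly as WordCountSpec does above.
    forAll(args) { (requiredArgs: Array[String], optionalArgs: Array[String]) =>
      (requiredArgs ++ optionalArgs).length should be(4)
    }
  }
}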
package scala.meta.internal.metabrowse import scala.meta.internal.semanticdb.Scala._ object ScalametaInternals { def ownerAndDesc(symbol: String): (String, Descriptor) = { DescriptorParser(symbol).swap } }
scalameta/metadoc
metabrowse-cli/src/main/scala/scala/meta/internal/metabrowse/ScalametaInternals.scala
Scala
apache-2.0
216
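A usage sketch for the helper above; the example symbol and the expected owner/descriptor split are assumptions about the SemanticDB global-symbol format, not verified output.

import scala.meta.internal.metabrowse.ScalametaInternals

object OwnerAndDescSketch extends App {
  // For a global SemanticDB symbol such as "scala/Option#", the owner is expected to be
  // the enclosing package symbol "scala/" and the descriptor to describe the type Option.
  val (owner, desc) = ScalametaInternals.ownerAndDesc("scala/Option#")
  println(owner)
  println(desc)
}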
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.util import java.util.concurrent.ConcurrentHashMap import scala.collection.JavaConversions import scala.collection.mutable.Map import scala.collection.immutable import org.apache.spark.scheduler.MapStatus import org.apache.spark.Logging /** * This is a custom implementation of scala.collection.mutable.Map which stores the insertion * timestamp along with each key-value pair. If specified, the timestamp of each pair can be * updated every time it is accessed. Key-value pairs whose timestamp are older than a particular * threshold time can then be removed using the clearOldValues method. This is intended to * be a drop-in replacement of scala.collection.mutable.HashMap. * @param updateTimeStampOnGet When enabled, the timestamp of a pair will be * updated when it is accessed */ class TimeStampedHashMap[A, B](updateTimeStampOnGet: Boolean = false) extends Map[A, B]() with Logging { val internalMap = new ConcurrentHashMap[A, (B, Long)]() def get(key: A): Option[B] = { val value = internalMap.get(key) if (value != null && updateTimeStampOnGet) { internalMap.replace(key, value, (value._1, currentTime)) } Option(value).map(_._1) } def iterator: Iterator[(A, B)] = { val jIterator = internalMap.entrySet().iterator() JavaConversions.asScalaIterator(jIterator).map(kv => (kv.getKey, kv.getValue._1)) } override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = { val newMap = new TimeStampedHashMap[A, B1] newMap.internalMap.putAll(this.internalMap) newMap.internalMap.put(kv._1, (kv._2, currentTime)) newMap } override def - (key: A): Map[A, B] = { val newMap = new TimeStampedHashMap[A, B] newMap.internalMap.putAll(this.internalMap) newMap.internalMap.remove(key) newMap } override def += (kv: (A, B)): this.type = { internalMap.put(kv._1, (kv._2, currentTime)) this } // Should we return previous value directly or as Option ? 
def putIfAbsent(key: A, value: B): Option[B] = { val prev = internalMap.putIfAbsent(key, (value, currentTime)) if (prev != null) Some(prev._1) else None } override def -= (key: A): this.type = { internalMap.remove(key) this } override def update(key: A, value: B) { this += ((key, value)) } override def apply(key: A): B = { val value = internalMap.get(key) if (value == null) throw new NoSuchElementException() value._1 } override def filter(p: ((A, B)) => Boolean): Map[A, B] = { JavaConversions.mapAsScalaConcurrentMap(internalMap).map(kv => (kv._1, kv._2._1)).filter(p) } override def empty: Map[A, B] = new TimeStampedHashMap[A, B]() override def size: Int = internalMap.size override def foreach[U](f: ((A, B)) => U) { val iterator = internalMap.entrySet().iterator() while(iterator.hasNext) { val entry = iterator.next() val kv = (entry.getKey, entry.getValue._1) f(kv) } } def toMap: immutable.Map[A, B] = iterator.toMap /** * Removes old key-value pairs that have timestamp earlier than `threshTime`, * calling the supplied function on each such entry before removing. */ def clearOldValues(threshTime: Long, f: (A, B) => Unit) { val iterator = internalMap.entrySet().iterator() while (iterator.hasNext) { val entry = iterator.next() if (entry.getValue._2 < threshTime) { f(entry.getKey, entry.getValue._1) logDebug("Removing key " + entry.getKey) iterator.remove() } } } /** * Removes old key-value pairs that have timestamp earlier than `threshTime` */ def clearOldValues(threshTime: Long) { clearOldValues(threshTime, (_, _) => ()) } private def currentTime: Long = System.currentTimeMillis() }
dotunolafunmiloye/spark
core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
Scala
apache-2.0
4,602
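A usage sketch built only on the class above (it compiles inside the Spark module, since TimeStampedHashMap mixes in the internal Logging trait): entries stamped before the threshold are dropped by clearOldValues.

import org.apache.spark.util.TimeStampedHashMap

object TimeStampedHashMapSketch {
  def main(args: Array[String]): Unit = {
    val map = new TimeStampedHashMap[String, Int](updateTimeStampOnGet = false)
    map("stale") = 1                      // stamped with its insertion time
    Thread.sleep(5)
    val cutoff = System.currentTimeMillis()
    map("fresh") = 2                      // stamped at or after the cutoff
    map.clearOldValues(cutoff)            // removes entries stamped strictly before cutoff
    println(map.toMap)                    // Map(fresh -> 2)
  }
}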