Dataset schema:

  column     type           range / values
  ---------  -------------  --------------
  code       stringlengths  5 .. 1M
  repo_name  stringlengths  5 .. 109
  path       stringlengths  6 .. 208
  language   stringclasses  1 value
  license    stringclasses  15 values
  size       int64          5 .. 1M
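Each record pairs a full Scala source file (the code column) with its provenance metadata. As a minimal sketch of the record shape implied by the schema — the CodeRecord class name and the choice of Scala field types are illustrative assumptions, not part of the dataset definition — the columns map naturally onto a case class:

// Minimal sketch of the record shape implied by the schema above.
// The class name and field types are assumptions for illustration;
// the length/class bounds in the schema are dataset statistics, not types.
final case class CodeRecord(
  code: String,     // full source text (5 .. ~1M characters)
  repoName: String, // repository, e.g. "mrchristine/spark-examples-dbc"
  path: String,     // file path within the repository
  language: String, // a single class in this split: "Scala"
  license: String,  // one of 15 license identifiers, e.g. "apache-2.0"
  size: Long        // file size in bytes
)

object CodeRecordExample {
  def main(args: Array[String]): Unit = {
    // Metadata values taken from the first record shown below; the source text is elided.
    val sample = CodeRecord(
      code = "/* ...Scala source elided... */",
      repoName = "mrchristine/spark-examples-dbc",
      path = "src/main/scala/org/apache/spark/examples/ml/NGramExample.scala",
      language = "Scala",
      license = "apache-2.0",
      size = 1731L
    )
    println(s"${sample.repoName}: ${sample.path} (${sample.size} bytes, ${sample.license})")
  }
}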
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// scalastyle:off println
package org.apache.spark.examples.ml

// $example on$
import org.apache.spark.ml.feature.NGram
// $example off$
import org.apache.spark.sql.SparkSession

object NGramExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("NGramExample")
      .getOrCreate()

    // $example on$
    val wordDataFrame = spark.createDataFrame(Seq(
      (0, Array("Hi", "I", "heard", "about", "Spark")),
      (1, Array("I", "wish", "Java", "could", "use", "case", "classes")),
      (2, Array("Logistic", "regression", "models", "are", "neat"))
    )).toDF("label", "words")

    val ngram = new NGram().setInputCol("words").setOutputCol("ngrams")
    val ngramDataFrame = ngram.transform(wordDataFrame)
    ngramDataFrame.take(3).map(_.getAs[Stream[String]]("ngrams").toList).foreach(println)
    // $example off$

    spark.stop()
  }
}
// scalastyle:on println
repo_name: mrchristine/spark-examples-dbc
path:      src/main/scala/org/apache/spark/examples/ml/NGramExample.scala
language:  Scala
license:   apache-2.0
size:      1,731
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package services import connectors.{ICLConnector, VatRegistrationConnector} import javax.inject.{Inject, Singleton} import models.CurrentProfile import models.api.SicCode import play.api.Logger import play.api.libs.json.{JsObject, Json, OWrites} import uk.gov.hmrc.http.HeaderCarrier import uk.gov.hmrc.play.bootstrap.config.ServicesConfig import scala.concurrent.{ExecutionContext, Future} @Singleton class ICLService @Inject()(val iclConnector: ICLConnector, config: ServicesConfig, val keystore: SessionService, val sicAndCompliance: SicAndComplianceService, val registrationConnector: VatRegistrationConnector )(implicit ec: ExecutionContext) { lazy val vatFeUrl: String = config.getConfString("vat-registration-frontend.www.url", throw new RuntimeException("[ICLService] Could not retrieve config for 'vat-registration-frontend'")) lazy val vatFeUri: String = config.getConfString("vat-registration-frontend.www.uri", throw new RuntimeException("[ICLService] Could not retrieve config for 'vat-registration-frontend.uri'")) lazy val iclReturnUrl: String = config.getConfString("vat-registration-frontend.redirect.url", throw new RuntimeException("[ICLService] Could not retrieve config for 'vat-registration-lookup-frontend.redirect.url'")) lazy val vatRedirectUrl: String = vatFeUrl + vatFeUri + iclReturnUrl def prepopulateSicCodes(implicit hc: HeaderCarrier, cp: CurrentProfile): Future[List[String]] = { sicAndCompliance.getSicAndCompliance flatMap { sac => sac.businessActivities match { case Some(res) => Future.successful(res.sicCodes map (_.code)) } } recover { case e => logger.warn(s"[ICLServiceImpl] [prepopulateSicCodes] Retrieving S4L/VR sic codes failed: ${e.getMessage}") Nil } } def journeySetup(customICLMessages: CustomICLMessages)(implicit hc: HeaderCarrier, cp: CurrentProfile): Future[String] = { def extractFromJsonSetup(jsonSetup: JsObject, item: String) = { (jsonSetup \ item).validate[String].getOrElse { logger.error(s"[ICLServiceImpl] [journeySetup] $item couldn't be parsed from Json object") throw new Exception } } for { codes <- prepopulateSicCodes jsonSetup <- iclConnector.iclSetup(constructJsonForJourneySetup(codes, customICLMessages)) fetchResultsUri = extractFromJsonSetup(jsonSetup, "fetchResultsUri") storeFetch <- keystore.cache[String]("ICLFetchResultsUri", fetchResultsUri) } yield { extractFromJsonSetup(jsonSetup, "journeyStartUri") } } private[services] def constructJsonForJourneySetup(sicCodes: List[String], customICLMessages: CustomICLMessages): JsObject = { Json.obj( "redirectUrl" -> vatRedirectUrl, "journeySetupDetails" -> Json.obj( "customMessages" -> Json.obj( "summary" -> customICLMessages ), "sicCodes" -> sicCodes ) ) } def getICLSICCodes()(implicit hc: HeaderCarrier, cp: CurrentProfile): Future[List[SicCode]] = { for { url <- keystore.fetchAndGet[String]("ICLFetchResultsUri").map(_.getOrElse(throw new Exception(s"[ICLService] [getICLCodes] No URL in keystore for key ICLFetchResultsUri for reg id 
${cp.registrationId}"))) js <- iclConnector.iclGetResult(url) list = Json.fromJson[List[SicCode]](js)(SicCode.readsList).get } yield { if (list.isEmpty) { logger.error(s"[ICLService] [getICLCodes] ICLGetResult returned no sicCodes for regId: ${cp.registrationId}") throw new Exception } list } } } case class CustomICLMessages(heading: String, lead: String, hint: String) object CustomICLMessages { implicit val writes: OWrites[CustomICLMessages] = Json.writes[CustomICLMessages] }
repo_name: hmrc/vat-registration-frontend
path:      app/services/ICLService.scala
language:  Scala
license:   apache-2.0
size:      4,550
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.job.local import java.util.concurrent.CountDownLatch import org.apache.samza.coordinator.JobModelManager import org.apache.samza.coordinator.metadatastore.CoordinatorStreamStore import org.apache.samza.job.ApplicationStatus.{New, Running, SuccessfulFinish, UnsuccessfulFinish} import org.apache.samza.job.{ApplicationStatus, CommandBuilder, StreamJob} import org.apache.samza.util.Logging import scala.collection.JavaConverters._ object ProcessJob { private def createProcessBuilder(commandBuilder: CommandBuilder): ProcessBuilder = { val processBuilder = new ProcessBuilder(commandBuilder.buildCommand.split(" ").toList.asJava) processBuilder.environment.putAll(commandBuilder.buildEnvironment) // Pipe all output to this process's streams. processBuilder.redirectOutput(ProcessBuilder.Redirect.INHERIT) processBuilder.redirectError(ProcessBuilder.Redirect.INHERIT) processBuilder } } class ProcessJob( commandBuilder: CommandBuilder, val jobModelManager: JobModelManager, val coordinatorStreamStore: CoordinatorStreamStore) extends StreamJob with Logging { import ProcessJob._ val lock = new Object val processBuilder: ProcessBuilder = createProcessBuilder(commandBuilder) var jobStatus: ApplicationStatus = New var processThread: Option[Thread] = None def submit: StreamJob = { val threadStartCountDownLatch = new CountDownLatch(1) // Create a non-daemon thread to make job runner block until the job finishes. // Without this, the proc dies when job runner ends. processThread = Some(new Thread { override def run { var processExitCode = -1 var process: Option[Process] = None setStatus(Running) try { threadStartCountDownLatch.countDown process = Some(processBuilder.start) processExitCode = process.get.waitFor } catch { case _: InterruptedException => process foreach { p => p.destroyForcibly } case e: Exception => error("Encountered an error during job start: %s".format(e.getMessage)) } finally { jobModelManager.stop coordinatorStreamStore.close setStatus(if (processExitCode == 0) SuccessfulFinish else UnsuccessfulFinish) } } }) info("Starting process job") processThread.get.start threadStartCountDownLatch.await ProcessJob.this } def kill: StreamJob = { getStatus match { case Running => { info("Attempting to kill running process job") processThread foreach { thread => thread.interrupt thread.join info("Process job killed successfully") } } case status => warn("Ignoring attempt to kill a process job that is not running. 
Job status is %s".format(status)) } ProcessJob.this } def waitForFinish(timeoutMs: Long): ApplicationStatus = { require(timeoutMs >= 0, "Timeout values must be non-negative") processThread foreach { thread => thread.join(timeoutMs) } getStatus } def waitForStatus(status: ApplicationStatus, timeoutMs: Long): ApplicationStatus = lock.synchronized { require(timeoutMs >= 0, "Timeout values must be non-negative") timeoutMs match { case 0 => { info("Waiting for application status %s indefinitely".format(status)) while (getStatus != status) lock.wait(0) } case _ => { info("Waiting for application status %s for %d ms".format(status, timeoutMs)) val startTimeMs = System.currentTimeMillis var remainingTimeoutMs = timeoutMs while (getStatus != status && remainingTimeoutMs > 0) { lock.wait(remainingTimeoutMs) val elapsedWaitTimeMs = System.currentTimeMillis - startTimeMs remainingTimeoutMs = timeoutMs - elapsedWaitTimeMs } } } getStatus } def getStatus: ApplicationStatus = lock.synchronized { jobStatus } private def setStatus(status: ApplicationStatus): Unit = lock.synchronized { info("Changing process job status from %s to %s".format(jobStatus, status)) jobStatus = status lock.notify } }
repo_name: prateekm/samza
path:      samza-core/src/main/scala/org/apache/samza/job/local/ProcessJob.scala
language:  Scala
license:   apache-2.0
size:      4,991
/* Copyright (C) 2008-2016 University of Massachusetts Amherst. This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible) http://factorie.cs.umass.edu, http://github.com/factorie Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cc.factorie.app.nlp.segment import java.io.StringReader import cc.factorie.app.nlp.{Document, DocumentAnnotator, Token} /** Split a String into a sequence of Tokens. Aims to adhere to tokenization rules used in Ontonotes and Penn Treebank. Note that CoNLL tokenization would use tokenizeAllDashedWords=true. Punctuation that ends a sentence should be placed alone in its own Token, hence this segmentation implicitly defines sentence segmentation also. (Although our the DeterministicSentenceSegmenter does make a few adjustments beyond this tokenizer.) This tokenizer can also normalize. This tokenizer is significantly faster than the DeterministicRegexTokenizer, using the EnglishLexer DFA automatically generated by JFlex using the definition in EnglishLexer.flex. */ class DeterministicLexerTokenizer( val tokenizeSgml:Boolean = false, // Keep sgml/html tags as tokens val tokenizeNewline:Boolean = false, // Keep newlines as tokens val tokenizeWhitespace:Boolean = false, // Keep all whitespace, including newlines, as tokens val tokenizeAllDashedWords:Boolean = false, // Separate dashed words into separate tokens, such as in CoNLL val abbrevPrecedesLowercase:Boolean = false, // Assume a period followed by a lower case word is an abbrev and not end of sentence (see below) val normalize: Boolean = true, // Whether to normalize token strings val normalizeQuote:Boolean = true, // Convert all double quotes to " val normalizeApostrophe:Boolean = true, // Convert all apostrophes to ', even within token strings val normalizeCurrency:Boolean = true, // Convert all currency symbols to "$", except cents symbol to "cents" val normalizeAmpersand:Boolean = true, // Convert all ampersand symbols (including "&amp;" to "&" val normalizeFractions:Boolean = true, // Convert unicode fraction characters to their spelled out analogues, like "3/4" val normalizeEllipsis:Boolean = true, // Convert unicode ellipsis character to spelled out analogue, "..." val undoPennParens:Boolean = true, // Change -LRB- etc to "(" etc. val unescapeSlash:Boolean = true, // Change \/ to / val unescapeAsterisk:Boolean = true, // Change \* to * val normalizeMDash:Boolean = true, // Convert all em-dashes to double dash -- val normalizeDash:Boolean = true, // Convert all other dashes to single dash - val normalizeHtmlSymbol:Boolean = true, // Convert &lt; to <, etc val normalizeHtmlAccent:Boolean = true // Convert Beyonc&eacute; to Beyonce ) extends DocumentAnnotator { /** How the annotation of this DocumentAnnotator should be printed in one-word-per-line (OWPL) format. If there is no per-token annotation, return null. Used in Document.owplString. 
*/ def tokenAnnotationString(token: Token) = token.stringStart.toString+'\t'+token.stringEnd.toString val lexer = // here we make sure that if normalize = false, we really don't normalize anything if(normalize) new EnglishLexer(null, tokenizeSgml, tokenizeNewline, tokenizeWhitespace, tokenizeAllDashedWords, abbrevPrecedesLowercase, normalizeQuote, normalizeApostrophe, normalizeCurrency, normalizeAmpersand, normalizeFractions, normalizeEllipsis, undoPennParens, unescapeSlash, unescapeAsterisk, normalizeMDash, normalizeDash, normalizeHtmlSymbol, normalizeHtmlAccent) else new EnglishLexer(null, tokenizeSgml, tokenizeNewline, tokenizeWhitespace, tokenizeAllDashedWords, abbrevPrecedesLowercase, false, false, false, false, false, false, false, false, false, false, false, false, false) def process(document: Document): Document = { for (section <- document.sections) { /* Add this newline to avoid JFlex issue where we can't match EOF with lookahead */ val reader = new StringReader(section.string + "\n") lexer.yyreset(reader) var currentToken = lexer.yylex().asInstanceOf[(String, Int, Int)] while (currentToken != null){ if (abbrevPrecedesLowercase && section.length > 1 && section.tokens.last.string == "." && java.lang.Character.isLowerCase(currentToken._1(0)) && section.tokens(section.length-2).stringEnd == section.tokens(section.length-1).stringStart) { // If we have a pattern like "Abbrev. has" (where "has" is any lowercase word) with token strings "Abbrev", ".", "is" (currently looking at "is") // then assume that the previous-previous word is actually an abbreviation; patch it up to become "Abbrev.", "has". val lastTwoTokens = section.takeRight(2).toIndexedSeq section.remove(section.length - 1); section.remove(section.length - 1) new Token(section, lastTwoTokens(0).stringStart, lastTwoTokens(1).stringEnd) } val tok = new Token(section, currentToken._2, currentToken._2 + currentToken._3) if(normalize && tok.string != currentToken._1) tok.attr += new PlainNormalizedTokenString(tok, currentToken._1) currentToken = lexer.yylex().asInstanceOf[(String, Int, Int)] } /* If tokenizing newlines, remove the trailing newline we added */ if(tokenizeNewline) section.remove(section.tokens.length - 1) } if (!document.annotators.contains(classOf[Token])) document.annotators(classOf[Token]) = this.getClass document } def prereqAttrs: Iterable[Class[_]] = Nil def postAttrs: Iterable[Class[_]] = List(classOf[Token]) /** Convenience function to run the tokenizer on an arbitrary String. The implementation builds a Document internally, then maps to token strings. 
*/ def apply(s:String): Seq[String] = process(new Document(s)).tokens.toSeq.map(_.string) } /* This version does not perform normalization, only tokenization */ object DeterministicTokenizer extends DeterministicLexerTokenizer( tokenizeSgml = false, tokenizeNewline = false, tokenizeWhitespace = false, tokenizeAllDashedWords = false, abbrevPrecedesLowercase = false, normalize = false, normalizeQuote = false, normalizeApostrophe = false, normalizeCurrency = false, normalizeAmpersand = false, normalizeFractions = false, normalizeEllipsis = false, undoPennParens = false, unescapeSlash = false, unescapeAsterisk = false, normalizeMDash = false, normalizeDash = false, normalizeHtmlSymbol = false, normalizeHtmlAccent = false ) /* This version performs normalization while it tokenizes, and also includes html tags as tokens */ object DeterministicNormalizingHtmlTokenizer extends DeterministicLexerTokenizer( tokenizeSgml = true, tokenizeNewline = false, tokenizeWhitespace = false, tokenizeAllDashedWords = false, abbrevPrecedesLowercase = false, normalize = true, normalizeQuote = true, normalizeApostrophe = true, normalizeCurrency = true, normalizeAmpersand = true, normalizeFractions = true, normalizeEllipsis = true, undoPennParens = true, unescapeSlash = true, unescapeAsterisk = true, normalizeMDash = true, normalizeDash = true, normalizeHtmlSymbol = true, normalizeHtmlAccent = true ) /* This token performs normalization while it tokenizes, removing html tags; You probably want to use this one */ object DeterministicNormalizingTokenizer extends DeterministicLexerTokenizer( tokenizeSgml = false, tokenizeNewline = false, tokenizeWhitespace = false, tokenizeAllDashedWords = false, abbrevPrecedesLowercase = false, normalize = true, normalizeQuote = true, normalizeApostrophe = true, normalizeCurrency = true, normalizeAmpersand = true, normalizeFractions = true, normalizeEllipsis = true, undoPennParens = true, unescapeSlash = true, unescapeAsterisk = true, normalizeMDash = true, normalizeDash = true, normalizeHtmlSymbol = true, normalizeHtmlAccent = true ){ /* For testing purposes: Tokenizes and normalizes input from stdin using DeterministicNormalizingTokenizer */ def main(args: Array[String]): Unit = { val string = io.Source.fromInputStream(System.in).mkString // println("Tokenizing...") val doc = new Document(string) val t0 = System.currentTimeMillis() DeterministicNormalizingTokenizer.process(doc) val time = System.currentTimeMillis()-t0 println(s"Processed ${doc.tokenCount} tokens in ${time}ms (${doc.tokenCount.toDouble/time*1000} tokens/second)") println(doc.tokens.map(_.string).mkString("\n")) } }
repo_name: factorie/factorie
path:      src/main/scala/cc/factorie/app/nlp/segment/DeterministicLexerTokenizer.scala
language:  Scala
license:   apache-2.0
size:      9,101
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs // Licence: http://www.gnu.org/licenses/gpl-3.0.en.html package org.ensime.core import akka.actor._ import akka.event.LoggingReceive.withLabel import org.apache.commons.vfs2.FileObject import org.ensime.api._ import org.ensime.core.debug.DebugActor import org.ensime.vfs._ import org.ensime.indexer._ import scala.collection.immutable.ListSet import scala.concurrent.duration._ import scala.util.Properties._ import scala.util._ import org.ensime.util.file._ import org.ensime.util.FileUtils final case class ShutdownRequest(reason: String, isError: Boolean = false) /** * The Project actor simply forwards messages coming from the user to * the respective subcomponent. */ class Project( broadcaster: ActorRef, implicit val config: EnsimeConfig ) extends Actor with ActorLogging with Stash { import context.{ dispatcher, system } import FileUtils._ /* The main components of the ENSIME server */ private var scalac: ActorRef = _ private var javac: ActorRef = _ private var debugger: ActorRef = _ private var indexer: ActorRef = _ private var docs: ActorRef = _ // vfs, resolver, search and watchers are considered "reliable" (hah!) private implicit val vfs: EnsimeVFS = EnsimeVFS() private val resolver = new SourceResolver(config) private val searchService = new SearchService(config, resolver) private val sourceWatcher = new SourceWatcher(config, resolver :: Nil) private val reTypecheck = new FileChangeListener { def reTypeCheck(): Unit = self ! AskReTypecheck def fileAdded(f: FileObject): Unit = reTypeCheck() def fileChanged(f: FileObject): Unit = reTypeCheck() def fileRemoved(f: FileObject): Unit = reTypeCheck() override def baseReCreated(f: FileObject): Unit = reTypeCheck() } private val classfileWatcher = context.actorOf(Props(new ClassfileWatcher(config, searchService :: reTypecheck :: Nil)), "classFileWatcher") def receive: Receive = awaitingConnectionInfoReq def awaitingConnectionInfoReq: Receive = withLabel("awaitingConnectionInfoReq") { case ConnectionInfoReq => sender() ! ConnectionInfo() context.become(handleRequests) init() unstashAll() case other => stash() } private def init(): Unit = { searchService.refresh().onComplete { case Success((deletes, inserts)) => // legacy clients expect to see IndexerReady on connection. // we could also just blindly send this on each connection. broadcaster ! Broadcaster.Persist(IndexerReadyEvent) log.debug(s"created $inserts and removed $deletes searchable rows") if (propOrFalse("ensime.exitAfterIndex")) context.parent ! ShutdownRequest("Index only run", isError = false) case Failure(problem) => log.warning(s"Refresh failed: ${problem.toString}") throw problem }(context.dispatcher) indexer = context.actorOf(Indexer(searchService), "indexer") if (config.scalaLibrary.isDefined || Set("scala", "dotty")(config.name)) { // we merge scala and java AnalyzerReady messages into a single // AnalyzerReady message, fired only after java *and* scala are ready val merger = context.actorOf(Props(new Actor { var senders = ListSet.empty[ActorRef] def receive: Receive = { case Broadcaster.Persist(AnalyzerReadyEvent) if senders.size == 1 => broadcaster ! Broadcaster.Persist(AnalyzerReadyEvent) case Broadcaster.Persist(AnalyzerReadyEvent) => senders += sender() case msg => broadcaster forward msg } })) scalac = context.actorOf(Analyzer(merger, indexer, searchService), "scalac") javac = context.actorOf(JavaAnalyzer(merger, indexer, searchService), "javac") } else { log.warning("Detected a pure Java project. 
Scala queries are not available.") scalac = system.deadLetters javac = context.actorOf(JavaAnalyzer(broadcaster, indexer, searchService), "javac") } debugger = context.actorOf(DebugActor.props(broadcaster), "debugging") docs = context.actorOf(DocResolver(), "docs") } override def postStop(): Unit = { // make sure the "reliable" dependencies are cleaned up Try(sourceWatcher.shutdown()) searchService.shutdown() // async Try(vfs.close()) } // debounces ReloadExistingFilesEvent private var rechecking: Cancellable = _ def handleRequests: Receive = withLabel("handleRequests") { case AskReTypecheck => Option(rechecking).foreach(_.cancel()) rechecking = system.scheduler.scheduleOnce( 5 seconds, scalac, ReloadExistingFilesEvent ) // HACK: to expedite initial dev, Java requests use the Scala API case m @ TypecheckFileReq(sfi) if sfi.file.isJava => javac forward m case m @ CompletionsReq(sfi, _, _, _, _) if sfi.file.isJava => javac forward m case m @ DocUriAtPointReq(sfi, _) if sfi.file.isJava => javac forward m case m @ TypeAtPointReq(sfi, _) if sfi.file.isJava => javac forward m case m @ SymbolDesignationsReq(sfi, _, _, _) if sfi.file.isJava => javac forward m case m @ SymbolAtPointReq(sfi, _) if sfi.file.isJava => javac forward m // mixed mode query case TypecheckFilesReq(files) => val (javas, scalas) = files.partition(_.file.isJava) if (javas.nonEmpty) javac forward TypecheckFilesReq(javas) if (scalas.nonEmpty) scalac forward TypecheckFilesReq(scalas) case m: RpcAnalyserRequest => scalac forward m case m: RpcDebuggerRequest => debugger forward m case m: RpcSearchRequest => indexer forward m case m: DocSigPair => docs forward m // added here to prevent errors when client sends this repeatedly (e.g. as a keepalive case ConnectionInfoReq => sender() ! ConnectionInfo() } } object Project { def apply(target: ActorRef)(implicit config: EnsimeConfig): Props = Props(classOf[Project], target, config) }
repo_name: mwielocha/ensime-server
path:      core/src/main/scala/org/ensime/core/Project.scala
language:  Scala
license:   gpl-3.0
size:      5,981
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.yarn import java.io.{File, FileInputStream, FileNotFoundException, FileOutputStream} import java.net.URI import java.nio.file.Paths import java.util.Properties import java.util.concurrent.ConcurrentHashMap import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap => MutableHashMap} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.mapreduce.MRJobConfig import org.apache.hadoop.yarn.api.ApplicationConstants.Environment import org.apache.hadoop.yarn.api.protocolrecords.{GetNewApplicationResponse, SubmitApplicationRequest} import org.apache.hadoop.yarn.api.records._ import org.apache.hadoop.yarn.client.api.{YarnClient, YarnClientApplication} import org.apache.hadoop.yarn.conf.YarnConfiguration import org.apache.hadoop.yarn.event.{Dispatcher, Event, EventHandler} import org.apache.hadoop.yarn.server.resourcemanager.{ClientRMService, RMAppManager, RMContext} import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp import org.apache.hadoop.yarn.server.security.ApplicationACLsManager import org.apache.hadoop.yarn.util.Records import org.mockito.ArgumentMatchers.{any, anyBoolean, eq => meq} import org.mockito.Mockito._ import org.mockito.invocation.InvocationOnMock import org.scalatest.matchers.must.Matchers import org.scalatest.matchers.should.Matchers._ import org.apache.spark.{SparkConf, SparkException, SparkFunSuite, TestUtils} import org.apache.spark.deploy.yarn.ResourceRequestHelper._ import org.apache.spark.deploy.yarn.config._ import org.apache.spark.internal.config._ import org.apache.spark.resource.ResourceID import org.apache.spark.resource.ResourceUtils.AMOUNT import org.apache.spark.util.{SparkConfWithEnv, Utils} class ClientSuite extends SparkFunSuite with Matchers { private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*) import Client._ var oldSystemProperties: Properties = null test("default Yarn application classpath") { getDefaultYarnApplicationClasspath should be(Fixtures.knownDefYarnAppCP) } test("default MR application classpath") { getDefaultMRApplicationClasspath should be(Fixtures.knownDefMRAppCP) } test("resultant classpath for an application that defines a classpath for YARN") { withAppConf(Fixtures.mapYARNAppConf) { conf => val env = newEnv populateHadoopClasspath(conf, env) classpath(env) should be(Fixtures.knownYARNAppCP +: getDefaultMRApplicationClasspath) } } test("resultant classpath for an application that defines a classpath for MR") { withAppConf(Fixtures.mapMRAppConf) { conf => val env = 
newEnv populateHadoopClasspath(conf, env) classpath(env) should be(getDefaultYarnApplicationClasspath :+ Fixtures.knownMRAppCP) } } test("resultant classpath for an application that defines both classpaths, YARN and MR") { withAppConf(Fixtures.mapAppConf) { conf => val env = newEnv populateHadoopClasspath(conf, env) classpath(env) should be(Array(Fixtures.knownYARNAppCP, Fixtures.knownMRAppCP)) } } private val SPARK = "local:/sparkJar" private val USER = "local:/userJar" private val ADDED = "local:/addJar1,local:/addJar2,/addJar3" private val PWD = "{{PWD}}" test("Local jar URIs") { val conf = new Configuration() val sparkConf = new SparkConf() .set(SPARK_JARS, Seq(SPARK)) .set(USER_CLASS_PATH_FIRST, true) .set("spark.yarn.dist.jars", ADDED) val env = new MutableHashMap[String, String]() val args = new ClientArguments(Array("--jar", USER)) populateClasspath(args, conf, sparkConf, env) val cp = env("CLASSPATH").split(":|;|<CPS>") s"$SPARK,$USER,$ADDED".split(",").foreach({ entry => val uri = new URI(entry) if (Utils.LOCAL_SCHEME.equals(uri.getScheme())) { cp should contain (uri.getPath()) } else { cp should not contain (uri.getPath()) } }) cp should not contain ("local") cp should contain(PWD) cp should contain (s"$PWD${Path.SEPARATOR}${LOCALIZED_CONF_DIR}") cp should not contain (APP_JAR) } test("Jar path propagation through SparkConf") { val conf = new Configuration() val sparkConf = new SparkConf() .set(SPARK_JARS, Seq(SPARK)) .set("spark.yarn.dist.jars", ADDED) val client = createClient(sparkConf, args = Array("--jar", USER)) doReturn(new Path("/")).when(client).copyFileToRemote(any(classOf[Path]), any(classOf[Path]), meq(None), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) val tempDir = Utils.createTempDir() try { // Because we mocked "copyFileToRemote" above to avoid having to create fake local files, // we need to create a fake config archive in the temp dir to avoid having // prepareLocalResources throw an exception. new FileOutputStream(new File(tempDir, LOCALIZED_CONF_ARCHIVE)).close() client.prepareLocalResources(new Path(tempDir.getAbsolutePath()), Nil) sparkConf.get(APP_JAR) should be (Some(USER)) // The non-local path should be propagated by name only, since it will end up in the app's // staging dir. val expected = ADDED.split(",") .map(p => { val uri = new URI(p) if (Utils.LOCAL_SCHEME == uri.getScheme()) { p } else { Option(uri.getFragment()).getOrElse(new File(p).getName()) } }) .mkString(",") sparkConf.get(SECONDARY_JARS) should be (Some(expected.split(",").toSeq)) } finally { Utils.deleteRecursively(tempDir) } } test("Cluster path translation") { val conf = new Configuration() val sparkConf = new SparkConf() .set(SPARK_JARS, Seq("local:/localPath/spark.jar")) .set(GATEWAY_ROOT_PATH, "/localPath") .set(REPLACEMENT_ROOT_PATH, "/remotePath") getClusterPath(sparkConf, "/localPath") should be ("/remotePath") getClusterPath(sparkConf, "/localPath/1:/localPath/2") should be ( "/remotePath/1:/remotePath/2") val env = new MutableHashMap[String, String]() populateClasspath(null, conf, sparkConf, env, extraClassPath = Some("/localPath/my1.jar")) val cp = classpath(env) cp should contain ("/remotePath/spark.jar") cp should contain ("/remotePath/my1.jar") } test("configuration and args propagate through createApplicationSubmissionContext") { // When parsing tags, duplicates and leading/trailing whitespace should be removed. // Spaces between non-comma strings should be preserved as single tags. 
Empty strings may or // may not be removed depending on the version of Hadoop being used. val sparkConf = new SparkConf() .set(APPLICATION_TAGS.key, ",tag1, dup,tag2 , ,multi word , dup") .set(MAX_APP_ATTEMPTS, 42) .set("spark.app.name", "foo-test-app") .set(QUEUE_NAME, "staging-queue") .set(APPLICATION_PRIORITY, 1) val args = new ClientArguments(Array()) val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) val client = new Client(args, sparkConf, null) client.createApplicationSubmissionContext( new YarnClientApplication(getNewApplicationResponse, appContext), containerLaunchContext) appContext.getApplicationName should be ("foo-test-app") appContext.getQueue should be ("staging-queue") appContext.getAMContainerSpec should be (containerLaunchContext) appContext.getApplicationType should be ("SPARK") appContext.getClass.getMethods.filter(_.getName == "getApplicationTags").foreach { method => val tags = method.invoke(appContext).asInstanceOf[java.util.Set[String]] tags should contain allOf ("tag1", "dup", "tag2", "multi word") tags.asScala.count(_.nonEmpty) should be (4) } appContext.getMaxAppAttempts should be (42) appContext.getPriority.getPriority should be (1) } test("specify a more specific type for the application") { // TODO (SPARK-31733) Make this test case pass with Hadoop-3.2 assume(!isYarnResourceTypesAvailable) // When the type exceeds 20 characters will be truncated by yarn val appTypes = Map( 1 -> ("", ""), 2 -> (" ", " "), 3 -> ("SPARK-SQL", "SPARK-SQL"), 4 -> ("012345678901234567890123", "01234567890123456789")) for ((id, (sourceType, targetType)) <- appTypes) { val sparkConf = new SparkConf().set("spark.yarn.applicationType", sourceType) val args = new ClientArguments(Array()) val appContext = spy(Records.newRecord(classOf[ApplicationSubmissionContext])) val appId = ApplicationId.newInstance(123456, id) appContext.setApplicationId(appId) val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) val client = new Client(args, sparkConf, null) val context = client.createApplicationSubmissionContext( new YarnClientApplication(getNewApplicationResponse, appContext), containerLaunchContext) val yarnClient = mock(classOf[YarnClient]) when(yarnClient.submitApplication(any())).thenAnswer((invocationOnMock: InvocationOnMock) => { val subContext = invocationOnMock.getArguments()(0) .asInstanceOf[ApplicationSubmissionContext] val request = Records.newRecord(classOf[SubmitApplicationRequest]) request.setApplicationSubmissionContext(subContext) val rmContext = mock(classOf[RMContext]) val conf = mock(classOf[Configuration]) val map = new ConcurrentHashMap[ApplicationId, RMApp]() when(rmContext.getRMApps).thenReturn(map) val dispatcher = mock(classOf[Dispatcher]) when(rmContext.getDispatcher).thenReturn(dispatcher) when[EventHandler[_]](dispatcher.getEventHandler).thenReturn( new EventHandler[Event[_]] { override def handle(event: Event[_]): Unit = {} } ) val writer = mock(classOf[RMApplicationHistoryWriter]) when(rmContext.getRMApplicationHistoryWriter).thenReturn(writer) val publisher = mock(classOf[SystemMetricsPublisher]) when(rmContext.getSystemMetricsPublisher).thenReturn(publisher) when(appContext.getUnmanagedAM).thenReturn(true) val rmAppManager = new RMAppManager(rmContext, null, null, 
mock(classOf[ApplicationACLsManager]), conf) val clientRMService = new ClientRMService(rmContext, null, rmAppManager, null, null, null) clientRMService.submitApplication(request) assert(map.get(subContext.getApplicationId).getApplicationType === targetType) null }) yarnClient.submitApplication(context) } } test("spark.yarn.jars with multiple paths and globs") { val libs = Utils.createTempDir() val single = Utils.createTempDir() val jar1 = TestUtils.createJarWithFiles(Map(), libs) val jar2 = TestUtils.createJarWithFiles(Map(), libs) val jar3 = TestUtils.createJarWithFiles(Map(), single) val jar4 = TestUtils.createJarWithFiles(Map(), single) val jarsConf = Seq( s"${libs.getAbsolutePath()}/*", jar3.getPath(), s"local:${jar4.getPath()}", s"local:${single.getAbsolutePath()}/*") val sparkConf = new SparkConf().set(SPARK_JARS, jarsConf) val client = createClient(sparkConf) val tempDir = Utils.createTempDir() client.prepareLocalResources(new Path(tempDir.getAbsolutePath()), Nil) assert(sparkConf.get(SPARK_JARS) === Some(Seq(s"local:${jar4.getPath()}", s"local:${single.getAbsolutePath()}/*"))) verify(client).copyFileToRemote(any(classOf[Path]), meq(new Path(jar1.toURI())), meq(None), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) verify(client).copyFileToRemote(any(classOf[Path]), meq(new Path(jar2.toURI())), meq(None), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) verify(client).copyFileToRemote(any(classOf[Path]), meq(new Path(jar3.toURI())), meq(None), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) val cp = classpath(client) cp should contain (buildPath(PWD, LOCALIZED_LIB_DIR, "*")) cp should not contain (jar3.getPath()) cp should contain (jar4.getPath()) cp should contain (buildPath(single.getAbsolutePath(), "*")) } test("distribute jars archive") { val temp = Utils.createTempDir() val archive = TestUtils.createJarWithFiles(Map(), temp) val sparkConf = new SparkConf().set(SPARK_ARCHIVE, archive.getPath()) val client = createClient(sparkConf) client.prepareLocalResources(new Path(temp.getAbsolutePath()), Nil) verify(client).copyFileToRemote(any(classOf[Path]), meq(new Path(archive.toURI())), meq(None), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) classpath(client) should contain (buildPath(PWD, LOCALIZED_LIB_DIR, "*")) sparkConf.set(SPARK_ARCHIVE, Utils.LOCAL_SCHEME + ":" + archive.getPath()) intercept[IllegalArgumentException] { client.prepareLocalResources(new Path(temp.getAbsolutePath()), Nil) } } test("SPARK-37239: distribute jars archive with set STAGING_FILE_REPLICATION") { val temp = Utils.createTempDir() val archive = TestUtils.createJarWithFiles(Map(), temp) val replication = 5 val sparkConf = new SparkConf() .set(SPARK_ARCHIVE, archive.getPath()) .set(STAGING_FILE_REPLICATION, replication) val client = createClient(sparkConf) client.prepareLocalResources(new Path(temp.getAbsolutePath()), Nil) // It is difficult to assert the result of `setReplication` in UT because this method in // `RawLocalFileSystem` always return true and not change the value of `replication`. // So we can only assert the call of `client.copyFileToRemote` has passed in a non `None`. 
verify(client).copyFileToRemote(any(classOf[Path]), meq(new Path(archive.toURI())), meq(Some(replication.toShort)), any(classOf[MutableHashMap[URI, Path]]), anyBoolean(), any()) classpath(client) should contain (buildPath(PWD, LOCALIZED_LIB_DIR, "*")) } test("distribute archive multiple times") { val libs = Utils.createTempDir() // Create jars dir and RELEASE file to avoid IllegalStateException. val jarsDir = new File(libs, "jars") assert(jarsDir.mkdir()) new FileOutputStream(new File(libs, "RELEASE")).close() val userLib1 = Utils.createTempDir() val testJar = TestUtils.createJarWithFiles(Map(), userLib1) // Case 1: FILES_TO_DISTRIBUTE and ARCHIVES_TO_DISTRIBUTE can't have duplicate files val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath)) .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath)) val client = createClient(sparkConf) val tempDir = Utils.createTempDir() intercept[IllegalArgumentException] { client.prepareLocalResources(new Path(tempDir.getAbsolutePath()), Nil) } // Case 2: FILES_TO_DISTRIBUTE can't have duplicate files. val sparkConfFiles = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath, testJar.getPath)) val clientFiles = createClient(sparkConfFiles) val tempDirForFiles = Utils.createTempDir() intercept[IllegalArgumentException] { clientFiles.prepareLocalResources(new Path(tempDirForFiles.getAbsolutePath()), Nil) } // Case 3: ARCHIVES_TO_DISTRIBUTE can't have duplicate files. val sparkConfArchives = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath, testJar.getPath)) val clientArchives = createClient(sparkConfArchives) val tempDirForArchives = Utils.createTempDir() intercept[IllegalArgumentException] { clientArchives.prepareLocalResources(new Path(tempDirForArchives.getAbsolutePath()), Nil) } // Case 4: FILES_TO_DISTRIBUTE can have unique file. val sparkConfFilesUniq = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath)) val clientFilesUniq = createClient(sparkConfFilesUniq) val tempDirForFilesUniq = Utils.createTempDir() clientFilesUniq.prepareLocalResources(new Path(tempDirForFilesUniq.getAbsolutePath()), Nil) // Case 5: ARCHIVES_TO_DISTRIBUTE can have unique file. 
val sparkConfArchivesUniq = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath)) val clientArchivesUniq = createClient(sparkConfArchivesUniq) val tempDirArchivesUniq = Utils.createTempDir() clientArchivesUniq.prepareLocalResources(new Path(tempDirArchivesUniq.getAbsolutePath()), Nil) } test("distribute local spark jars") { val temp = Utils.createTempDir() val jarsDir = new File(temp, "jars") assert(jarsDir.mkdir()) val jar = TestUtils.createJarWithFiles(Map(), jarsDir) new FileOutputStream(new File(temp, "RELEASE")).close() val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> temp.getAbsolutePath())) val client = createClient(sparkConf) client.prepareLocalResources(new Path(temp.getAbsolutePath()), Nil) classpath(client) should contain (buildPath(PWD, LOCALIZED_LIB_DIR, "*")) } test("ignore same name jars") { val libs = Utils.createTempDir() val jarsDir = new File(libs, "jars") assert(jarsDir.mkdir()) new FileOutputStream(new File(libs, "RELEASE")).close() val userLib1 = Utils.createTempDir() val userLib2 = Utils.createTempDir() val jar1 = TestUtils.createJarWithFiles(Map(), jarsDir) val jar2 = TestUtils.createJarWithFiles(Map(), userLib1) // Copy jar2 to jar3 with same name val jar3 = { val target = new File(userLib2, new File(jar2.toURI).getName) val input = new FileInputStream(jar2.getPath) val output = new FileOutputStream(target) Utils.copyStream(input, output, closeStreams = true) target.toURI.toURL } val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath)) .set(JARS_TO_DISTRIBUTE, Seq(jar2.getPath, jar3.getPath)) val client = createClient(sparkConf) val tempDir = Utils.createTempDir() client.prepareLocalResources(new Path(tempDir.getAbsolutePath()), Nil) // Only jar2 will be added to SECONDARY_JARS, jar3 which has the same name with jar2 will be // ignored. 
sparkConf.get(SECONDARY_JARS) should be (Some(Seq(new File(jar2.toURI).getName))) } Seq( "client" -> YARN_AM_RESOURCE_TYPES_PREFIX, "cluster" -> YARN_DRIVER_RESOURCE_TYPES_PREFIX ).foreach { case (deployMode, prefix) => test(s"custom resource request ($deployMode mode)") { assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) val resources = Map("fpga" -> 2, "gpu" -> 3) ResourceRequestTestHelper.initializeResourceTypes(resources.keys.toSeq) val conf = new SparkConf().set(SUBMIT_DEPLOY_MODE, deployMode) resources.foreach { case (name, v) => conf.set(s"${prefix}${name}.${AMOUNT}", v.toString) } val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) val client = new Client(new ClientArguments(Array()), conf, null) client.createApplicationSubmissionContext( new YarnClientApplication(getNewApplicationResponse, appContext), containerLaunchContext) resources.foreach { case (name, value) => ResourceRequestTestHelper.getRequestedValue(appContext.getResource, name) should be (value) } } } test("custom driver resource request yarn config and spark config fails") { assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) val conf = new SparkConf().set(SUBMIT_DEPLOY_MODE, "cluster") val resources = Map(conf.get(YARN_GPU_DEVICE) -> "gpu", conf.get(YARN_FPGA_DEVICE) -> "fpga") ResourceRequestTestHelper.initializeResourceTypes(resources.keys.toSeq) resources.keys.foreach { yarnName => conf.set(s"${YARN_DRIVER_RESOURCE_TYPES_PREFIX}${yarnName}.${AMOUNT}", "2") } resources.values.foreach { rName => conf.set(new ResourceID(SPARK_DRIVER_PREFIX, rName).amountConf, "3") } val error = intercept[SparkException] { ResourceRequestHelper.validateResources(conf) }.getMessage() assert(error.contains("Do not use spark.yarn.driver.resource.yarn.io/fpga.amount," + " please use spark.driver.resource.fpga.amount")) assert(error.contains("Do not use spark.yarn.driver.resource.yarn.io/gpu.amount," + " please use spark.driver.resource.gpu.amount")) } test("custom executor resource request yarn config and spark config fails") { assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) val conf = new SparkConf().set(SUBMIT_DEPLOY_MODE, "cluster") val resources = Map(conf.get(YARN_GPU_DEVICE) -> "gpu", conf.get(YARN_FPGA_DEVICE) -> "fpga") ResourceRequestTestHelper.initializeResourceTypes(resources.keys.toSeq) resources.keys.foreach { yarnName => conf.set(s"${YARN_EXECUTOR_RESOURCE_TYPES_PREFIX}${yarnName}.${AMOUNT}", "2") } resources.values.foreach { rName => conf.set(new ResourceID(SPARK_EXECUTOR_PREFIX, rName).amountConf, "3") } val error = intercept[SparkException] { ResourceRequestHelper.validateResources(conf) }.getMessage() assert(error.contains("Do not use spark.yarn.executor.resource.yarn.io/fpga.amount," + " please use spark.executor.resource.fpga.amount")) assert(error.contains("Do not use spark.yarn.executor.resource.yarn.io/gpu.amount," + " please use spark.executor.resource.gpu.amount")) } test("custom resources spark config mapped to yarn config") { assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) val conf = new SparkConf().set(SUBMIT_DEPLOY_MODE, "cluster") val yarnMadeupResource = "yarn.io/madeup" val resources = Map(conf.get(YARN_GPU_DEVICE) -> "gpu", conf.get(YARN_FPGA_DEVICE) -> "fpga", yarnMadeupResource -> "madeup") ResourceRequestTestHelper.initializeResourceTypes(resources.keys.toSeq) 
resources.values.foreach { rName => conf.set(new ResourceID(SPARK_DRIVER_PREFIX, rName).amountConf, "3") } // also just set yarn one that we don't convert conf.set(s"${YARN_DRIVER_RESOURCE_TYPES_PREFIX}${yarnMadeupResource}.${AMOUNT}", "5") val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) val client = new Client(new ClientArguments(Array()), conf, null) val newContext = client.createApplicationSubmissionContext( new YarnClientApplication(getNewApplicationResponse, appContext), containerLaunchContext) val yarnRInfo = ResourceRequestTestHelper.getResources(newContext.getResource) val allResourceInfo = yarnRInfo.map(rInfo => (rInfo.name -> rInfo.value)).toMap assert(allResourceInfo.get(conf.get(YARN_GPU_DEVICE)).nonEmpty) assert(allResourceInfo.get(conf.get(YARN_GPU_DEVICE)).get === 3) assert(allResourceInfo.get(conf.get(YARN_FPGA_DEVICE)).nonEmpty) assert(allResourceInfo.get(conf.get(YARN_FPGA_DEVICE)).get === 3) assert(allResourceInfo.get(yarnMadeupResource).nonEmpty) assert(allResourceInfo.get(yarnMadeupResource).get === 5) } test("gpu/fpga spark resources mapped to custom yarn resources") { assume(ResourceRequestHelper.isYarnResourceTypesAvailable()) val conf = new SparkConf().set(SUBMIT_DEPLOY_MODE, "cluster") val gpuCustomName = "custom/gpu" val fpgaCustomName = "custom/fpga" conf.set(YARN_GPU_DEVICE.key, gpuCustomName) conf.set(YARN_FPGA_DEVICE.key, fpgaCustomName) val resources = Map(gpuCustomName -> "gpu", fpgaCustomName -> "fpga") ResourceRequestTestHelper.initializeResourceTypes(resources.keys.toSeq) resources.values.foreach { rName => conf.set(new ResourceID(SPARK_DRIVER_PREFIX, rName).amountConf, "3") } val appContext = Records.newRecord(classOf[ApplicationSubmissionContext]) val getNewApplicationResponse = Records.newRecord(classOf[GetNewApplicationResponse]) val containerLaunchContext = Records.newRecord(classOf[ContainerLaunchContext]) val client = new Client(new ClientArguments(Array()), conf, null) val newContext = client.createApplicationSubmissionContext( new YarnClientApplication(getNewApplicationResponse, appContext), containerLaunchContext) val yarnRInfo = ResourceRequestTestHelper.getResources(newContext.getResource) val allResourceInfo = yarnRInfo.map(rInfo => (rInfo.name -> rInfo.value)).toMap assert(allResourceInfo.get(gpuCustomName).nonEmpty) assert(allResourceInfo.get(gpuCustomName).get === 3) assert(allResourceInfo.get(fpgaCustomName).nonEmpty) assert(allResourceInfo.get(fpgaCustomName).get === 3) } test("test yarn jars path not exists") { withTempDir { dir => val conf = new SparkConf().set(SPARK_JARS, Seq(dir.getAbsolutePath + "/test")) val client = new Client(new ClientArguments(Array()), conf, null) withTempDir { distDir => intercept[FileNotFoundException] { client.prepareLocalResources(new Path(distDir.getAbsolutePath), Nil) } } } } test("SPARK-31582 Being able to not populate Hadoop classpath") { Seq(true, false).foreach { populateHadoopClassPath => withAppConf(Fixtures.mapAppConf) { conf => val sparkConf = new SparkConf() .set(POPULATE_HADOOP_CLASSPATH, populateHadoopClassPath) val env = new MutableHashMap[String, String]() val args = new ClientArguments(Array("--jar", USER)) populateClasspath(args, conf, sparkConf, env) if (populateHadoopClassPath) { classpath(env) should (contain (Fixtures.knownYARNAppCP) and contain (Fixtures.knownMRAppCP)) } else { classpath(env) 
should (not contain (Fixtures.knownYARNAppCP) and not contain (Fixtures.knownMRAppCP)) } } } } test("SPARK-35672: test Client.getUserClasspathUrls") { val gatewayRootPath = "/local/matching/replace" val replacementRootPath = "/replaced/path" val conf = new SparkConf() .set(SECONDARY_JARS, Seq( s"local:$gatewayRootPath/foo.jar", "local:/local/not/matching/replace/foo.jar", "file:/absolute/file/path/foo.jar", s"$gatewayRootPath/but-not-actually-local/foo.jar", "/absolute/path/foo.jar", "relative/path/foo.jar" )) .set(GATEWAY_ROOT_PATH, gatewayRootPath) .set(REPLACEMENT_ROOT_PATH, replacementRootPath) def assertUserClasspathUrls(cluster: Boolean, expectedReplacementPath: String): Unit = { val expectedUrls = Seq( Paths.get(APP_JAR_NAME).toAbsolutePath.toUri.toString, s"file:$expectedReplacementPath/foo.jar", "file:/local/not/matching/replace/foo.jar", "file:/absolute/file/path/foo.jar", // since this path wasn't a local URI, it should never be replaced s"file:$gatewayRootPath/but-not-actually-local/foo.jar", "file:/absolute/path/foo.jar", Paths.get("relative/path/foo.jar").toAbsolutePath.toUri.toString ).map(URI.create(_).toURL).toArray assert(Client.getUserClasspathUrls(conf, cluster) === expectedUrls) } // assert that no replacement happens when cluster = false by expecting the replacement // path to be the same as the original path assertUserClasspathUrls(cluster = false, gatewayRootPath) assertUserClasspathUrls(cluster = true, replacementRootPath) } private val matching = Seq( ("files URI match test1", "file:///file1", "file:///file2"), ("files URI match test2", "file:///c:file1", "file://c:file2"), ("files URI match test3", "file://host/file1", "file://host/file2"), ("wasb URI match test", "wasb://bucket1@user", "wasb://bucket1@user/"), ("hdfs URI match test", "hdfs:/path1", "hdfs:/path1") ) matching.foreach { t => test(t._1) { assert(Client.compareUri(new URI(t._2), new URI(t._3)), s"No match between ${t._2} and ${t._3}") } } private val unmatching = Seq( ("files URI unmatch test1", "file:///file1", "file://host/file2"), ("files URI unmatch test2", "file://host/file1", "file:///file2"), ("files URI unmatch test3", "file://host/file1", "file://host2/file2"), ("wasb URI unmatch test1", "wasb://bucket1@user", "wasb://bucket2@user/"), ("wasb URI unmatch test2", "wasb://bucket1@user", "wasb://bucket1@user2/"), ("s3 URI unmatch test", "s3a://user@pass:bucket1/", "s3a://user2@pass2:bucket1/"), ("hdfs URI unmatch test1", "hdfs://namenode1/path1", "hdfs://namenode1:8080/path2"), ("hdfs URI unmatch test2", "hdfs://namenode1:8020/path1", "hdfs://namenode1:8080/path2") ) unmatching.foreach { t => test(t._1) { assert(!Client.compareUri(new URI(t._2), new URI(t._3)), s"match between ${t._2} and ${t._3}") } } object Fixtures { val knownDefYarnAppCP: Seq[String] = YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH.toSeq val knownDefMRAppCP: Seq[String] = MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH.split(",").toSeq val knownYARNAppCP = "/known/yarn/path" val knownMRAppCP = "/known/mr/path" val mapMRAppConf = Map("mapreduce.application.classpath" -> knownMRAppCP) val mapYARNAppConf = Map(YarnConfiguration.YARN_APPLICATION_CLASSPATH -> knownYARNAppCP) val mapAppConf = mapYARNAppConf ++ mapMRAppConf } def withAppConf(m: Map[String, String] = Map())(testCode: (Configuration) => Any): Unit = { val conf = new Configuration m.foreach { case (k, v) => conf.set(k, v, "ClientSpec") } testCode(conf) } def newEnv: MutableHashMap[String, String] = MutableHashMap[String, String]() def classpath(env: 
MutableHashMap[String, String]): Array[String] = env(Environment.CLASSPATH.name).split(":|;|<CPS>") private def createClient( sparkConf: SparkConf, args: Array[String] = Array()): Client = { val clientArgs = new ClientArguments(args) spy(new Client(clientArgs, sparkConf, null)) } private def classpath(client: Client): Array[String] = { val env = new MutableHashMap[String, String]() populateClasspath(null, new Configuration(), client.sparkConf, env) classpath(env) } }
repo_name: nchammas/spark
path:      resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
language:  Scala
license:   apache-2.0
size:      32,130
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest

private[scalatest] trait StopOnFailure extends SuiteMixin { thisSuite: Suite =>

  override abstract def run(testName: Option[String], args: Args): Status = {
    super.run(testName, args.copy(reporter = new StopOnFailureReporter(args.reporter, args.stopper, System.err)))
  }
}
repo_name: travisbrown/scalatest
path:      src/main/scala/org/scalatest/StopOnFailure.scala
language:  Scala
license:   apache-2.0
size:      908
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.cluster.sdv.generated import org.apache.spark.sql.common.util._ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} /** * Test Class for globalsort1TestCase to verify all scenerios */ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach{ override def beforeAll { sql(s"""drop table if exists uniqdata11""").collect sql(s"""drop table if exists uniqdataquery1""").collect } override def beforeEach(): Unit = { sql(s"""drop table if exists uniqdata11""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-01 test("Carbon-Loading-Optimizations-Global-Sort-01-01-01", Include) { sql(s"""drop table if exists uniqdata11""".stripMargin).collect sql( s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String, |ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, |BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), |DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double, |INTEGER_COLUMN1 int) STORED AS carbondata""".stripMargin.replaceAll(System .lineSeparator, "")).collect sql( s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' | into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"', | 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB, | DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1, | Double_COLUMN2,INTEGER_COLUMN1')""".stripMargin.replaceAll(System.lineSeparator, "")) .collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-02 test("Carbon-Loading-Optimizations-Global-Sort-01-01-02", Include) { sql( s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, |DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint, |DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, |Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""".stripMargin .replaceAll(System.lineSeparator, "")).collect sql( s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table | uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#', | 'MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='FORCE', | 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1, | BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, | INTEGER_COLUMN1')""".stripMargin.replaceAll(System.lineSeparator, "")).collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } 
//Carbon-Loading-Optimizations-Global-Sort-01-01-03 test("Carbon-Loading-Optimizations-Global-Sort-01-01-03", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/folder1/folder2' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-04 test("Carbon-Loading-Optimizations-Global-Sort-01-01-04", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/folder1' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-05 test("Carbon-Loading-Optimizations-Global-Sort-01-01-05", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','maxcolumns'='13','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-06 test("Carbon-Loading-Optimizations-Global-Sort-01-01-06", Include) { sql(s"""CREATE TABLE uniqdata17 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata17""").collect sql(s"""drop 
table if exists uniqdata17""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-07 test("Carbon-Loading-Optimizations-Global-Sort-01-01-07", Include) { sql(s"""CREATE TABLE uniqdata19b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19b OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19b""").collect sql(s"""drop table if exists uniqdata19b""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-08 test("Carbon-Loading-Optimizations-Global-Sort-01-01-08", Include) { sql(s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19c""").collect sql(s"""drop table if exists uniqdata19c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-09 test("Carbon-Loading-Optimizations-Global-Sort-01-01-09", Include) { sql(s"""CREATE TABLE uniqdata19d (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19d OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19d""").collect sql(s"""drop table if exists uniqdata19d""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-10 test("Carbon-Loading-Optimizations-Global-Sort-01-01-10", Include) { sql(s"""CREATE TABLE uniqdata19e (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19e OPTIONS('DELIMITER'=',' , 
'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19e""").collect sql(s"""drop table if exists uniqdata19e""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-11 test("Carbon-Loading-Optimizations-Global-Sort-01-01-11", Include) { sql(s"""CREATE TABLE uniqdata19f (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19f OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""".stripMargin).collect sql(s"""select * from uniqdata19f""").collect sql(s"""drop table if exists uniqdata19f""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-14 test("Carbon-Loading-Optimizations-Global-Sort-01-01-14", Include) { sql( s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT','NO_INVERTED_INDEX'='CUST_NAME')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20c OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata20c""").collect sql(s"""drop table if exists uniqdata20c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-15 test("Carbon-Loading-Optimizations-Global-Sort-01-01-15", Include) { sql(s"""drop table if exists t3""").collect sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql( s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('GLOBAL_SORT_PARTITIONS'='2')""".stripMargin).collect sql(s"""select * from t3""").collect sql(s"""drop table if exists t3""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-16 test("Carbon-Loading-Optimizations-Global-Sort-01-01-16", Include) { sql(s"""drop table if exists t3""").collect sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA LOCAL INPATH 
'$resourcesPath/Data/batchsort/data.csv' into table t3 options('GLOBAL_SORT_PARTITIONS'='2')""").collect sql(s"""select * from t3""").collect sql(s"""drop table if exists t3""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-19 test("Carbon-Loading-Optimizations-Global-Sort-01-01-19", Include) { sql(s"""drop table if exists uniqdata20b""").collect sql(s"""drop table if exists uniqdata20c""").collect sql(s"""CREATE TABLE uniqdata20b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20b OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""insert into uniqdata20c select * from uniqdata20b""").collect sql(s"""select * from uniqdata20b""").collect sql(s"""drop table if exists uniqdata20b""").collect sql(s"""drop table if exists uniqdata20c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-20 test("Carbon-Loading-Optimizations-Global-Sort-01-01-20", Include) { sql(s"""drop table if exists uniqdata_h""").collect sql(s"""drop table if exists uniqdata_c""").collect sql(s"""CREATE TABLE uniqdata_h (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect sql(s"""load data inpath '$resourcesPath/Data/uniqdata/2000_UniqData_hive2.csv' into table uniqdata_h""").collect sql(s"""CREATE TABLE uniqdata_c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""insert into uniqdata_c select * from uniqdata_h""").collect sql(s"""select * from uniqdata_c""").collect sql(s"""drop table if exists uniqdata_h""").collect sql(s"""drop table if exists uniqdata_c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-21 test("Carbon-Loading-Optimizations-Global-Sort-01-01-21", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table 
uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-22 test("Carbon-Loading-Optimizations-Global-Sort-01-01-22", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-23 test("Carbon-Loading-Optimizations-Global-Sort-01-01-23", Include) { sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata11 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-24 test("Carbon-Loading-Optimizations-Global-Sort-01-01-24", Include) { sql(s"""drop table if exists uniqdata11""").collect sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/folder1/folder2' into table uniqdata11 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-25 test("Carbon-Loading-Optimizations-Global-Sort-01-01-25", Include) { sql(s"""drop table if exists uniqdata11""").collect sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME 
String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/folder1' into table uniqdata11 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-26 test("Carbon-Loading-Optimizations-Global-Sort-01-01-26", Include) { sql(s"""drop table if exists uniqdata11""").collect sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','maxcolumns'='13','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata11""").collect sql(s"""drop table if exists uniqdata11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-27 test("Carbon-Loading-Optimizations-Global-Sort-01-01-27", Include) { sql(s"""drop table if exists uniqdata17""").collect sql(s"""CREATE TABLE uniqdata17 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata17 OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata17""").collect sql(s"""drop table if exists uniqdata17""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-28 test("Carbon-Loading-Optimizations-Global-Sort-01-01-28", Include) { sql(s"""drop table if exists uniqdata19b""").collect sql(s"""CREATE TABLE uniqdata19b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19b OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 
'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19b""").collect sql(s"""drop table if exists uniqdata19b""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-29 test("Carbon-Loading-Optimizations-Global-Sort-01-01-29", Include) { sql(s"""drop table if exists uniqdata19c""").collect sql(s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19c OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19c""").collect sql(s"""drop table if exists uniqdata19c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-30 test("Carbon-Loading-Optimizations-Global-Sort-01-01-30", Include) { sql(s"""drop table if exists uniqdata19d""").collect sql(s"""CREATE TABLE uniqdata19d (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19d OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19d""").collect sql(s"""drop table if exists uniqdata19d""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-31 test("Carbon-Loading-Optimizations-Global-Sort-01-01-31", Include) { sql(s"""drop table if exists uniqdata19e""").collect sql(s"""CREATE TABLE uniqdata19e (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19e OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 
'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19e""").collect sql(s"""drop table if exists uniqdata19e""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-32 test("Carbon-Loading-Optimizations-Global-Sort-01-01-32", Include) { sql(s"""drop table if exists uniqdata19f""").collect sql(s"""CREATE TABLE uniqdata19f (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19f OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata19f""").collect sql(s"""drop table if exists uniqdata19f""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-36 test("Carbon-Loading-Optimizations-Global-Sort-01-01-36", Include) { sql(s"""drop TABLE if exists uniqdata_c""").collect sql(s"""CREATE TABLE uniqdata_c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata_c OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""delete from uniqdata_c where CUST_NAME='CUST_NAME_20000'""").collect sql(s"""select * from uniqdata_c""").collect sql(s"""drop TABLE if exists uniqdata_c""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-38 test("Carbon-Loading-Optimizations-Global-Sort-01-01-38", Include) { sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata OPTIONS('GLOBAL_SORT_PARTITIONS'='2','DELIMITER'=',' , 
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdata""").collect sql(s"""drop TABLE if exists uniqdata""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-39 test("Carbon-Loading-Optimizations-Global-Sort-01-01-39", Include) { sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select count(*) from uniqdataquery1 where cust_name="CUST_NAME_00000" group by cust_name""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-40 test("Carbon-Loading-Optimizations-Global-Sort-01-01-40", Include) { sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select count(*) from uniqdataquery1 where cust_name IN(1,2,3) group by cust_name""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-41 test("Carbon-Loading-Optimizations-Global-Sort-01-01-41", Include) { sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdataquery1 where cust_id between 9002 and 9030""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-42 test("Carbon-Loading-Optimizations-Global-Sort-01-01-42", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID 
int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect intercept[Exception] { sql(s"""select * from uniqdataquery1 where Is NulL""").collect } sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-43 test("Carbon-Loading-Optimizations-Global-Sort-01-01-43", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdataquery1 where cust_id IS NOT NULL""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-44 test("Carbon-Loading-Optimizations-Global-Sort-01-01-44", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from (select cust_id from uniqdataquery1 where cust_id IN (10987,10988)) uniqdataquery1 where cust_id IN (10987, 10988)""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-45 test("Carbon-Loading-Optimizations-Global-Sort-01-01-45", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 join uniqdataquery11 where uniqdataquery1.CUST_ID > 10700 and uniqdataquery11.CUST_ID > 10500""").collect sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-46 test("Carbon-Loading-Optimizations-Global-Sort-01-01-46", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 LEFT join uniqdataquery11 where uniqdataquery1.CUST_ID > 10000""").collect sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-47 
test("Carbon-Loading-Optimizations-Global-Sort-01-01-47", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 FULL JOIN uniqdataquery11 where uniqdataquery1.CUST_ID=uniqdataquery11.CUST_ID""").collect sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-48 test("Carbon-Loading-Optimizations-Global-Sort-01-01-48", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""drop table if exists uniqdataquery11""").collect sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 
OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select broadcast.cust_id from uniqdataquery1 broadcast join uniqdataquery11 where broadcast.cust_id > 10900""").collect sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""drop table if exists uniqdataquery11""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-49 test("Carbon-Loading-Optimizations-Global-Sort-01-01-49", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_id asc""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-50 test("Carbon-Loading-Optimizations-Global-Sort-01-01-50", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-51 test("Carbon-Loading-Optimizations-Global-Sort-01-01-51", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdataquery1 where cust_id > 10544 
sort by cust_name desc, cust_id asc""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-52 test("Carbon-Loading-Optimizations-Global-Sort-01-01-52", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select cust_id,avg(cust_id) from uniqdataquery1 where cust_id IN (select cust_id from uniqdataquery1) group by cust_id""").collect sql(s"""drop table if exists uniqdataquery1""").collect } //Carbon-Loading-Optimizations-Global-Sort-01-01-54 test("Carbon-Loading-Optimizations-Global-Sort-01-01-54", Include) { sql(s"""drop table if exists uniqdataquery1""").collect sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata tblproperties('sort_columns'='')""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select cust_id,avg(cust_id) from uniqdataquery1 where cust_id IN (select cust_id from uniqdataquery1) group by cust_id""").collect sql(s"""drop table if exists uniqdataquery1""").collect } override def afterAll: Unit = { sql(s"""drop table if exists uniqdata11""").collect sql(s"""drop table if exists uniqdataquery1""").collect } override def afterEach: Unit = { sql(s"""drop table if exists uniqdata11""").collect sql(s"""drop table if exists uniqdataquery1""").collect } }
jackylk/incubator-carbondata
integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
Scala
apache-2.0
53,114
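The GlobalSortTestCase record above drives CarbonData's global-sort load path entirely through SQL. The sketch below distills that pattern into a minimal standalone program: a table created with SORT_SCOPE set to GLOBAL_SORT and a load that sets GLOBAL_SORT_PARTITIONS. The SparkSession wiring, the reduced column list, the table name uniqdata_gs, and the CSV path are placeholders; a CarbonData-enabled session (which the suite obtains from its QueryTest harness) is assumed.

import org.apache.spark.sql.SparkSession

object GlobalSortLoadSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder session: assumes CarbonData is already wired into Spark
    // (the test suite above gets this from its cluster test harness).
    val spark = SparkSession.builder
      .appName("GlobalSortLoadSketch")
      .getOrCreate()

    // SORT_SCOPE at table level makes every load into this table use global sort.
    spark.sql(
      """CREATE TABLE IF NOT EXISTS uniqdata_gs (
        |  CUST_ID int, CUST_NAME string, INTEGER_COLUMN1 int)
        |STORED AS carbondata
        |TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""".stripMargin)

    // GLOBAL_SORT_PARTITIONS controls how many tasks perform the global sort;
    // the CSV path and FILEHEADER are illustrative placeholders only.
    spark.sql(
      """LOAD DATA INPATH '/path/to/uniqdata.csv' INTO TABLE uniqdata_gs
        |OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE',
        |        'GLOBAL_SORT_PARTITIONS'='2',
        |        'FILEHEADER'='CUST_ID,CUST_NAME,INTEGER_COLUMN1')""".stripMargin)

    spark.sql("SELECT count(*) FROM uniqdata_gs").show()
    spark.stop()
  }
}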
/*
 * Copyright 2014, by Vladimir Kostyukov and Contributors.
 *
 * This file is a part of a Finch library that may be found at
 *
 *      https://github.com/finagle/finch
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributor(s):
 * Ryan Plessner
 * Jens Halm
 */

package io.finch

import _root_.argonaut.{EncodeJson, Json, Parse, DecodeJson}
import io.finch.request.DecodeRequest
import io.finch.request.RequestError
import io.finch.response.EncodeResponse
import com.twitter.util.{Try, Throw, Return}

package object argonaut {

  /**
   * @param decode The argonaut ''DecodeJson'' to use for decoding
   * @tparam A The type of data that the ''DecodeJson'' will decode into
   * @return Create a Finch ''DecodeRequest'' from an argonaut ''DecodeJson''
   */
  implicit def decodeArgonaut[A](implicit decode: DecodeJson[A]): DecodeRequest[A] = DecodeRequest(
    Parse.decodeEither(_).fold(
      error => Throw(new RequestError(error)),
      Return(_)
    )
  )

  /**
   * @param encode The argonaut ''EncodeJson'' to use for encoding
   * @tparam A The type of data that the ''EncodeJson'' will encode into a JSON string
   * @return Create a Finch ''EncodeResponse'' from an argonaut ''EncodeJson''
   */
  implicit def encodeArgonaut[A](implicit encode: EncodeJson[A]): EncodeResponse[A] =
    EncodeResponse("application/json")(encode.encode(_).nospaces)
}
trane/finch
argonaut/src/main/scala/io/finch/argonaut/package.scala
Scala
apache-2.0
1,911
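The package object above bridges Argonaut codecs into Finch's request-decoding and response-encoding type classes. Below is a minimal usage sketch under the assumptions of this era of the Finch and Argonaut APIs: the Todo case class and its field names are hypothetical, and a single CodecJson (which supplies both EncodeJson and DecodeJson) is enough for the DecodeRequest and EncodeResponse bridges to resolve implicitly.

import _root_.argonaut._, Argonaut._
import io.finch.argonaut._
import io.finch.request.DecodeRequest
import io.finch.response.EncodeResponse

// Hypothetical payload type, used only for illustration.
case class Todo(id: Int, title: String)

object TodoCodecs {
  // One CodecJson gives Argonaut both directions (EncodeJson and DecodeJson).
  implicit val todoCodec: CodecJson[Todo] =
    casecodec2(Todo.apply, Todo.unapply)("id", "title")

  // The implicit bridges defined in the package object now resolve for Todo.
  val requestDecoder: DecodeRequest[Todo] = implicitly[DecodeRequest[Todo]]
  val responseEncoder: EncodeResponse[Todo] = implicitly[EncodeResponse[Todo]]
}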
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.api.model.effect

trait FullEffectInfo extends BasicEffectInfo with DetailedEffectInfo with ValueInfo
adarro/ddo-calc
subprojects/common/ddo-modeling/src/main/scala/io/truthencode/ddo/api/model/effect/FullEffectInfo.scala
Scala
apache-2.0
773
package com.twitter.zipkin.aggregate

object RunHadoopJob extends App {
  com.twitter.scalding.Tool.main(
    Array("com.twitter.zipkin.aggregate.ZipkinAggregateJob", "--hdfs") ++ args)
}
travisbrown/zipkin
zipkin-aggregate/src/main/scala/com/twitter/zipkin/aggregate/RunHadoopJob.scala
Scala
apache-2.0
182
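RunHadoopJob above simply delegates to Scalding's Tool with the job class name and the --hdfs flag, forwarding any remaining command-line arguments. As a companion sketch under the same assumption (ZipkinAggregateJob on the classpath), Scalding's Tool also accepts --local for an in-process run, which can be convenient for smoke-testing the aggregation without a Hadoop cluster; the RunLocalJob name is made up for this example.

package com.twitter.zipkin.aggregate

// Local-mode counterpart of RunHadoopJob: same job class, but scalding's
// Tool runs it in-process via the "--local" flag instead of "--hdfs".
object RunLocalJob extends App {
  com.twitter.scalding.Tool.main(
    Array("com.twitter.zipkin.aggregate.ZipkinAggregateJob", "--local") ++ args)
}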
/* * Copyright (c) 2016 SnappyData, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package org.apache.spark.sql.sources import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import com.gemstone.gemfire.internal.cache.{AbstractRegion, ColocationHelper, PartitionedRegion} import com.pivotal.gemfirexd.internal.engine.Misc import org.apache.spark.sql.hive.SnappyStoreHiveCatalog import org.apache.spark.sql.{AnalysisException, SnappySession} import org.apache.spark.sql.catalyst.analysis.{UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedStar} import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, AttributeSet, Coalesce, Expression, Literal, PredicateHelper, SubqueryExpression, UnresolvedWindowExpression} import org.apache.spark.sql.catalyst.optimizer.ReorderJoin import org.apache.spark.sql.catalyst.planning.ExtractFiltersAndInnerJoins._ import org.apache.spark.sql.catalyst.{expressions, plans} import org.apache.spark.sql.catalyst.plans.Inner import org.apache.spark.sql.catalyst.plans.logical.{Join, LogicalPlan, SubqueryAlias} import org.apache.spark.sql.collection.ToolsCallbackInit import org.apache.spark.sql.execution.columnar.impl.{BaseColumnFormatRelation, ColumnFormatRelation, IndexColumnFormatRelation} import org.apache.spark.sql.execution.datasources.LogicalRelation import org.apache.spark.sql.execution.row.RowFormatRelation import org.apache.spark.sql.sources.Entity.{INDEX, INDEX_RELATION, TABLE} object RuleUtils extends PredicateHelper { private def getIndex(catalog: SnappyStoreHiveCatalog, name: String) = { val relation = catalog.lookupRelation(catalog.newQualifiedTableName(name)) relation match { case LogicalRelation(i: IndexColumnFormatRelation, _, _) => Some(relation) case _ => None } } def fetchIndexes(snappySession: SnappySession, table: LogicalPlan): Seq[(LogicalPlan, Seq[LogicalPlan])] = table.collect { case l@LogicalRelation(p: ParentRelation, _, _) => val catalog = snappySession.sessionCatalog (l.asInstanceOf[LogicalPlan], p.getDependents(catalog).flatMap(getIndex(catalog, _))) } def getJoinKeys(left: LogicalPlan, right: LogicalPlan, joinConditions: Seq[Expression]): Seq[(Expression, Expression)] = { // see caller notes about mixed referenced join keys getting filtered out. val joinedRefs = left.outputSet ++ right.outputSet val predicates = joinConditions.flatMap(splitConjunctivePredicates). filter(_.references.subsetOf(joinedRefs)) predicates. filterNot(_.references.subsetOf(left.outputSet)). filterNot(_.references.subsetOf(right.outputSet)). 
flatMap { case expressions.EqualTo(l, r) if canEvaluate(l, left) && canEvaluate(r, right) => Some((l, r)) case expressions.EqualTo(l, r) if canEvaluate(l, right) && canEvaluate(r, left) => Some((r, l)) // Replace null with default value for joining key, then those rows with null in it could // be joined together case expressions.EqualNullSafe(l, r) if canEvaluate(l, left) && canEvaluate(r, right) => Some((Coalesce(Seq(l, Literal.default(l.dataType))), Coalesce(Seq(r, Literal.default(r.dataType))))) case expressions.EqualNullSafe(l, r) if canEvaluate(l, right) && canEvaluate(r, left) => Some((Coalesce(Seq(r, Literal.default(r.dataType))), Coalesce(Seq(l, Literal.default(l.dataType))))) case other => None } } @tailrec def canTraverseLeftToRight(source: Seq[LogicalPlan], target: LogicalPlan, replicatedReachablePaths: Seq[List[LogicalPlan]]): Boolean = { if (source.isEmpty) { return false } else if (source.exists(_ == target)) { true } else if (replicatedReachablePaths.isEmpty) { false } else { var currentReachablePaths = replicatedReachablePaths val newChains = source.flatMap { rep1 => val (otherSide, remainingPaths) = currentReachablePaths.foldLeft( (Seq.empty[LogicalPlan], currentReachablePaths)) { case ((otherKey, current), plan) => plan match { case l :: r :: o if o.isEmpty & (l == rep1) => ((otherKey ++ Some(r)), current.filterNot(_ == plan)) case l :: r :: o if o.isEmpty & (r == rep1) => ((otherKey ++ Some(l)), current.filterNot(_ == plan)) case _ => ((otherKey, current)) } } currentReachablePaths = remainingPaths otherSide } canTraverseLeftToRight(newChains, target, currentReachablePaths) } } protected[sql] def applyDefaultAction[A](entity: (PartialPlan, A), withFilters: Boolean) (implicit snappySession: SnappySession, addToDefault: (PartialPlan, A) => PartialPlan): PartialPlan = entity match { // handles replicated & non-colocated logical plan case (finalPlan, table: LogicalPlan) if !finalPlan.replaced.contains(table) => val (tableFilters, _) = RuleUtils.partitionBy(table.outputSet, finalPlan.conditions) if (tableFilters.isEmpty && withFilters) { return finalPlan } val joinRefs = finalPlan.outputSet ++ table.outputSet val (tableJoinConditions, otherJoinConditions) = RuleUtils.partitionBy(joinRefs, finalPlan.conditions) val pTabOrIndex = RuleUtils.chooseIndexForFilter(table, tableFilters) if ((tableJoinConditions.toSet -- tableFilters.toSet).nonEmpty || finalPlan.curPlan == null) { val newPlan = finalPlan.copy( curPlan = RuleUtils.createJoin(finalPlan.curPlan, pTabOrIndex.map(_.index) .getOrElse(table), tableJoinConditions), replaced = finalPlan.replaced ++ pTabOrIndex, outputSet = joinRefs, input = finalPlan.input.filterNot(_ == table), conditions = if (finalPlan.curPlan == null) finalPlan.conditions else otherJoinConditions) addToDefault(newPlan, table.asInstanceOf[A]) } else { finalPlan } // handles colocated with filters replacement case (finalPlan, replacement: Replacement) if !finalPlan.replaced.contains(replacement) => val (tableFilters, _) = RuleUtils.partitionBy(replacement.table.outputSet, finalPlan.conditions) if (tableFilters.isEmpty && withFilters) { return finalPlan } val joinRefs = finalPlan.outputSet ++ replacement.table.outputSet val (pTabJoinConditions, otherJoinConditions) = RuleUtils.partitionBy(joinRefs, finalPlan.conditions) assert((pTabJoinConditions.toSet -- tableFilters.toSet).nonEmpty || finalPlan.curPlan == null, s"joinConditions ${pTabJoinConditions.mkString(" && ")} " + s"filterConditions ${tableFilters.mkString(" && ")}") val newPlan = finalPlan.copy( 
curPlan = RuleUtils.createJoin(finalPlan.curPlan, replacement.index, pTabJoinConditions), replaced = finalPlan.replaced ++ Some(replacement), outputSet = joinRefs, input = finalPlan.input.filterNot(_ == replacement.table), conditions = if (finalPlan.curPlan == null) finalPlan.conditions else otherJoinConditions) addToDefault(newPlan, replacement.asInstanceOf[A]) } protected[sql] def createJoin(curPlan: LogicalPlan, planToAdd: LogicalPlan, toJoinWith: Seq[Expression]) = if (curPlan == null) { planToAdd } else { assert(toJoinWith.nonEmpty, "We shouldn't favor this in between because it creates cartesian" + " product.") Join(curPlan, planToAdd, Inner, toJoinWith.reduceLeftOption(expressions.And)) } protected[sql] def partitionBy(allColumns: AttributeSet, expressions: Seq[Expression]): (Seq[Expression], Seq[Expression]) = expressions.partition(e => e.references.subsetOf(allColumns) && !SubqueryExpression.hasCorrelatedSubquery(e)) protected[sql] def returnPlan(partial: PartialPlan) = CompletePlan(ReorderJoin.createOrderedJoin( if (partial.curPlan == null) partial.input else Seq(partial.curPlan) ++ partial.input, partial.conditions), partial.replaced ++ partial.input.map(t => Replacement(t, t))) protected[sql] def chooseIndexForFilter(child: LogicalPlan, conditions: Seq[Expression]) (implicit snappySession: SnappySession) = { val columnGroups = conditions.collect { case expressions.EqualTo(l, r) => l.collectFirst { case a: AttributeReference => a }.orElse { r.collectFirst { case a: AttributeReference => a } } case expressions.EqualNullSafe(l, r) => l.collectFirst { case a: AttributeReference => a } .orElse { r.collectFirst { case a: AttributeReference => a } } }.groupBy(_.map(_.qualifier)).collect { case (table, cols) if table.nonEmpty & table.get.nonEmpty => ( table.get.get, cols.collect { case a if a.nonEmpty => a.get }) } val satisfyingPartitionColumns = for { (table, indexes) <- RuleUtils.fetchIndexes(snappySession, child) filterCols <- columnGroups.collectFirst { case (t, predicates) if predicates.nonEmpty => table match { case LogicalRelation(b: ColumnFormatRelation, _, _) if b.table.indexOf(t) > 0 => predicates case SubqueryAlias(alias, _) if alias.equals(t) => predicates case _ => Seq.empty } } if filterCols.nonEmpty matchedIndexes = indexes.collect { case idx@LogicalRelation(ir: IndexColumnFormatRelation, _, _) if ir.partitionColumns.length <= filterCols.length & ir.partitionColumns.forall(p => filterCols.exists(f => f.name.equalsIgnoreCase(p))) => (ir.partitionColumns.length, idx.asInstanceOf[LogicalPlan]) } if matchedIndexes.nonEmpty } yield { Replacement(table, matchedIndexes.maxBy(_._1)._2) } if (satisfyingPartitionColumns.isEmpty) { None } else { Some(satisfyingPartitionColumns.maxBy { r => r.index.statistics.sizeInBytes }) } } } object Entity { type TABLE = LogicalPlan type INDEX_RELATION = BaseColumnFormatRelation type INDEX = LogicalPlan def isColocated(left: LogicalPlan, right: LogicalPlan): Boolean = { val leftRelation = unwrapBaseColumnRelation(left) val rightRelation = unwrapBaseColumnRelation(right) if (leftRelation.isEmpty || rightRelation.isEmpty) { return false } val leftRegion = Misc.getRegionForTable(leftRelation.get.resolvedName, true) val leftLeader = leftRegion.asInstanceOf[AbstractRegion] match { case pr: PartitionedRegion => ColocationHelper.getLeaderRegionName(pr) } val rightRegion = Misc.getRegionForTable(rightRelation.get.resolvedName, true) val rightLeader = rightRegion.asInstanceOf[AbstractRegion] match { case pr: PartitionedRegion => 
ColocationHelper.getLeaderRegionName(pr) } leftLeader.equals(rightLeader) } def unwrapBaseColumnRelation( plan: LogicalPlan): Option[BaseColumnFormatRelation] = plan collectFirst { case LogicalRelation(relation: BaseColumnFormatRelation, _, _) => relation case SubqueryAlias(alias, LogicalRelation(relation: BaseColumnFormatRelation, _, _)) => relation } private def findR(p: Any) = p match { case UnresolvedAttribute(_) | UnresolvedFunction(_, _, _) | UnresolvedAlias(_, _) | UnresolvedWindowExpression(_, _) | UnresolvedGenerator(_, _) | UnresolvedStar(_) => true case _ => false } def hasUnresolvedReferences(plan: LogicalPlan): Boolean = plan.find { l => l.productIterator.exists(findR) || l.expressions.exists(e => findR(e) || e.productIterator.exists(findR) || e.references.exists(findR) ) }.nonEmpty def replaceAttribute(condition: Expression, attributeMapping: Map[Attribute, Attribute]): Expression = { condition.transformUp { case a: Attribute => attributeMapping.find({ case (source, _) => source.exprId == a.exprId }).map({ case (t, i) => i.withQualifier(t.qualifier) }).getOrElse(a) } } } object HasColocatedEntities { type ReturnType = ( Seq[(INDEX_RELATION, INDEX_RELATION)], Seq[ReplacementSet] ) def unapply(tables: (LogicalPlan, LogicalPlan))(implicit snappySession: SnappySession): Option[ReturnType] = { val (left, right) = tables /** now doing a one-to-one mapping of lefttable and its indexes with * right table and its indexes. Following example explains the combination * generator. * * val l = Seq(1, 2, 3) * val r = Seq(4, 5) * l.zip(Seq.fill(l.length)(r)).flatMap { * case (leftElement, rightList) => rightList.flatMap { e => * Seq((leftElement, e)) * } * }.foreach(println) * will output : * (1, 4) * (1, 5) * (2, 4) * (2, 5) * (3, 4) * (3, 5) * */ val leftRightEntityMapping = for { (leftTable, leftIndexes) <- RuleUtils.fetchIndexes(snappySession, left) (rightTable, rightIndexes) <- RuleUtils.fetchIndexes(snappySession, right) leftSeq = Seq(leftTable) ++ leftIndexes rightSeq = Seq(rightTable) ++ rightIndexes } yield { leftSeq.zip(Seq.fill(leftSeq.length)(rightSeq)) flatMap { case (leftElement, rightList) => rightList.flatMap(e => Seq((leftElement, e))) } } // right now not expecting multiple tables in left & right hand side. // assert(leftRightEntityMapping.size <= 1) val mappings = leftRightEntityMapping.flatMap { mappedElements => val (leftTable, rightTable) = mappedElements(0) // first pairing is always (table, table) for { (leftPlan, rightPlan) <- mappedElements leftRelation = Entity.unwrapBaseColumnRelation(leftPlan) if leftRelation.nonEmpty rightRelation = Entity.unwrapBaseColumnRelation(rightPlan) if rightRelation.nonEmpty if Entity.isColocated(leftPlan, rightPlan) } yield { val leftReplacement = leftTable match { case _: LogicalRelation => Replacement(leftTable, leftPlan) case subquery@SubqueryAlias(alias, _) => Replacement(subquery, SubqueryAlias(alias, leftPlan)) } val rightReplacement = rightTable match { case _: LogicalRelation => Replacement(rightTable, rightPlan) case subquery@SubqueryAlias(alias, _) => Replacement(subquery, SubqueryAlias(alias, rightPlan)) } ((leftRelation.get, rightRelation.get), ReplacementSet(ArrayBuffer(leftReplacement, rightReplacement), Seq.empty)) } } if (mappings.nonEmpty) { Some(mappings.unzip) } else { None } } } /** * Table to table or Table to index replacement. 
*/ case class Replacement(table: TABLE, index: INDEX, isPartitioned: Boolean = true) extends PredicateHelper { def isReplacable: Boolean = table != index val indexAttributes = index.output.collect { case ar: AttributeReference => ar } val tableToIndexAttributeMap = AttributeMap(table.output.map { case f: AttributeReference => val newA = indexAttributes.find(_.name.equalsIgnoreCase(f.name)). getOrElse(throw new IllegalStateException( s"Field $f not found in ${indexAttributes}")) (f, newA) case a => throw new AssertionError(s"UnHandled Attribute ${a} in table" + s" ${table.output.mkString(",")}") }) private var _replacedEntity: LogicalPlan = null def numPartitioningCols: Int = index match { case LogicalRelation(b: BaseColumnFormatRelation, _, _) => b.partitionColumns.length case _ => 0 } override def toString: String = { "" + (table match { case LogicalRelation(b: BaseColumnFormatRelation, _, _) => b.table case _ => table.toString() }) + " ----> " + (index match { case LogicalRelation(b: BaseColumnFormatRelation, _, _) => b.table case LogicalRelation(r: RowFormatRelation, _, _) => r.table case _ => index.toString() }) } def mappedConditions(conditions: Seq[Expression]): Seq[Expression] = conditions.map(Entity.replaceAttribute(_, tableToIndexAttributeMap)) protected[sources] def replacedPlan(conditions: Seq[Expression]): LogicalPlan = { if (_replacedEntity == null) { val tableConditions = conditions.filter(canEvaluate(_, table)) _replacedEntity = if (tableConditions.isEmpty) { index } else { plans.logical.Filter(mappedConditions(tableConditions).reduce(expressions.And), index) } } _replacedEntity } def estimatedSize(conditions: Seq[Expression]): BigInt = replacedPlan(conditions).statistics.sizeInBytes } /** * A set of possible replacements of table to indexes. * <br> * <strong>Note:</strong> The chain if consists of multiple partitioned tables, they must satisfy * colocation criteria. * * @param chain Multiple replacements. * @param conditions user provided join + filter conditions. */ case class ReplacementSet(chain: ArrayBuffer[Replacement], conditions: Seq[Expression]) extends Ordered[ReplacementSet] with PredicateHelper { lazy val bestJoinOrder: Seq[Replacement] = { val (part, rep) = chain.partition(_.isPartitioned) // pick minimum number of replicated tables required to fulfill colocated join order. val feasibleJoinPlan = Seq.range(0, chain.length - part.length + 1).flatMap(elem => rep.combinations(elem).map(part ++ _). 
flatMap(_.permutations).filter(hasJoinConditions)).filter(_.nonEmpty) if(feasibleJoinPlan.isEmpty) { Seq.empty } else { val all = feasibleJoinPlan.sortBy { jo => estimateSize(jo) }(implicitly[Ordering[BigInt]].reverse) all.head } } lazy val bestPlanEstimatedSize = estimateSize(bestJoinOrder) lazy val bestJoinOrderConditions = joinConditions(bestJoinOrder) private def joinConditions(joinOrder: Seq[Replacement]) = { val refs = joinOrder.map(_.table.outputSet).reduce(_ ++ _) conditions.filter(_.references.subsetOf(refs)) } private def estimateSize(joinOrder: Seq[Replacement]): BigInt = { if (joinOrder.isEmpty) { return BigInt(0) } var newConditions = joinConditions(joinOrder) newConditions = joinOrder.foldLeft(newConditions) { case (nc, e) => e.mappedConditions(nc) } val sz = joinOrder.map(_.replacedPlan(conditions)).zipWithIndex.foldLeft(BigInt(0)) { case (tot, (table, depth)) if depth == 2 => tot + table.statistics.sizeInBytes case (tot, (table, depth)) => tot + (table.statistics.sizeInBytes * depth) } sz } private def hasJoinConditions(replacements: Seq[Replacement]): Boolean = { replacements.sliding(2).forall(_.toList match { case table1 :: table2 :: _ => RuleUtils.getJoinKeys(table1.table, table2.table, conditions).nonEmpty case _ => false }) } override def equals(other: Any): Boolean = other match { case cr: ReplacementSet if chain.nonEmpty & cr.chain.nonEmpty => Entity.isColocated(chain(0).index, cr.chain(0).index) case _ => false } def merge(current: ReplacementSet): ReplacementSet = { current.chain.foreach { r => chain.find(_.index == r.index) match { case None => chain += r case _ => } } this } def numPartitioningColumns: Int = chain.headOption.map(_.numPartitioningCols).getOrElse(0) override def toString: String = chain.mkString("\n") override def compare(that: ReplacementSet): Int = bestJoinOrder.length compareTo that.bestJoinOrder.length match { case 0 => // for equal length chain, sort by smallest size bestPlanEstimatedSize compare that.bestPlanEstimatedSize match { // in case sizes are same (like in unit test we made it equal) pick the greater one case 0 => -(bestJoinOrderConditions.length compare that.bestJoinOrderConditions.length) case r => r } case c => -c // sort by largest chain } } /** * This we have to copy from spark patterns.scala because we want handle single table with * filters as well. * * This will have another advantage later if we decide to move our rule to the last instead of * injecting just after ReorderJoin, whereby additional nodes like Project requires handling. 
*/ object ExtractFiltersAndInnerJoins extends PredicateHelper { // flatten all inner joins, which are next to each other def flattenJoin(plan: LogicalPlan): (Seq[LogicalPlan], Seq[Expression]) = plan match { case Join(left, right, Inner, cond) => val (plans, conditions) = flattenJoin(left) (plans ++ Seq(right), conditions ++ cond.toSeq) case plans.logical.Filter(filterCondition, j@Join(left, right, Inner, joinCondition)) => val (plans, conditions) = flattenJoin(j) (plans, conditions ++ splitConjunctivePredicates(filterCondition)) case _ => (Seq(plan), Seq()) } def unapply(plan: LogicalPlan): // tables, joinConditions, filterConditions Option[(Seq[LogicalPlan], Seq[Expression])] = plan match { case [email protected](filterCondition, j@Join(_, _, Inner, _)) => Some(flattenJoin(f)) case j@Join(_, _, Inner, _) => Some(flattenJoin(j)) case [email protected](filterCondition, child) => Some(Seq(child), splitConjunctivePredicates(filterCondition)) case _ => None } } trait SubPlan { def currentColocatedGroup: ReplacementSet = throw new AnalysisException("Unexpected call") } case class PartialPlan(curPlan: LogicalPlan, replaced: Seq[Replacement], outputSet: AttributeSet, input: Seq[LogicalPlan], conditions: Seq[Expression], colocatedGroups: Seq[ReplacementSet], partitioned: Seq[LogicalPlan], replicates: Seq[LogicalPlan], others: Seq[LogicalPlan]) extends SubPlan { var curColocatedIndex = 0 override def currentColocatedGroup: ReplacementSet = colocatedGroups(curColocatedIndex) override def toString: String = if (curPlan != null) curPlan.toString() else "No Plan yet" /** * Apply on multiple entities one by one validating common conditions. */ def /:[A](plansToApply: Seq[A]) (specializedHandling: PartialFunction[(PartialPlan, A), PartialPlan]) (implicit snappySession: SnappySession): SubPlan = { (this /: plansToApply) { case (finalPlan, _) if finalPlan.input.isEmpty => finalPlan case (finalPlan, _: LogicalPlan) if finalPlan.input.size == 1 => finalPlan // ApplyRest will take care of last table and all filters. case (finalPlan, table: LogicalPlan) if finalPlan.replaced.contains(table) => finalPlan case (finalPlan, replacement: Replacement) if finalPlan.replaced.contains(replacement) => finalPlan case (partial, table) if specializedHandling.isDefinedAt(partial, table) => specializedHandling.lift(partial, table).get } } } case class CompletePlan(plan: LogicalPlan, replaced: Seq[Replacement]) extends SubPlan
vjr/snappydata
core/src/main/scala/org/apache/spark/sql/sources/RuleUtils.scala
Scala
apache-2.0
23,829
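The HasColocatedEntities extractor in the RuleUtils.scala entry above documents its left/right pairing with a small combination-generator example in a scaladoc comment. A self-contained sketch of that pairing, runnable without Spark or SnappyData and using illustrative names only, is:

// Minimal sketch of the pairing described in HasColocatedEntities' scaladoc:
// every element of the left sequence is paired with every element of the right one.
object PairingSketch {
  def crossPairs[A, B](left: Seq[A], right: Seq[B]): Seq[(A, B)] =
    left.zip(Seq.fill(left.length)(right)).flatMap {
      case (leftElement, rightList) => rightList.map(e => (leftElement, e))
    }

  def main(args: Array[String]): Unit = {
    // Prints (1,4) (1,5) (2,4) (2,5) (3,4) (3,5), matching the output
    // listed in the scaladoc comment above.
    crossPairs(Seq(1, 2, 3), Seq(4, 5)).foreach(println)
  }
}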
/* * Copyright 2012-2014 Comcast Cable Communications Management, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.comcast.xfinity.sirius.api.impl.membership import com.comcast.xfinity.sirius.NiceTest import akka.actor.{ActorSystem, ActorRef} import org.scalatest.BeforeAndAfterAll import akka.testkit.TestProbe import akka.agent.Agent import scala.concurrent.ExecutionContext.Implicits.global class MembershipHelperTest extends NiceTest with BeforeAndAfterAll { implicit val as = ActorSystem("MembershipHelperTest") override def afterAll(): Unit = { as.terminate() } describe("MembershipHelper") { describe("getRandomMember") { val localActorRef = TestProbe().ref val remoteActorRef = TestProbe().ref it("should send back a Member != the MembershipActor we asked...3 times in a row") { val membership: Agent[Map[String, Option[ActorRef]]] = Agent(Map("local" -> Some(localActorRef), "remote" -> Some(remoteActorRef))) val membershipHelper: MembershipHelper = MembershipHelper(membership, localActorRef) val data = membershipHelper.getRandomMember assert(data.get === remoteActorRef) val data2 = membershipHelper.getRandomMember assert(data2.get === remoteActorRef) val data3 = membershipHelper.getRandomMember assert(data3.get === remoteActorRef) } it("should send back a Failure if the only ActorRef in the MembershipMap is equal to the caller") { val membership: Agent[Map[String, Option[ActorRef]]] = Agent(Map("local" -> Some(localActorRef))) val membershipHelper: MembershipHelper = MembershipHelper(membership, localActorRef) val data = membershipHelper.getRandomMember assert(data.isFailure) } it("should send back a Failure if the membershipMap is empty") { val membership = Agent(Map[String, Option[ActorRef]]()) val membershipHelper: MembershipHelper = MembershipHelper(membership, localActorRef) val data = membershipHelper.getRandomMember assert(data.isFailure) } it("should send back a Failure if all values are currently None or local") { val membership = Agent(Map[String, Option[ActorRef]]("badactor" -> None, "similarlybad" -> None, "local" -> Some(localActorRef))) val membershipHelper: MembershipHelper = MembershipHelper(membership, localActorRef) val data = membershipHelper.getRandomMember assert(data.isFailure) } } describe("getClusterInfo") { it("should return only members that have ActorRefs associated") { val (probe1, probe2) = (TestProbe(), TestProbe()) val membership = Agent(Map[String, Option[ActorRef]]( "nothere" -> None, "here" -> Some(probe1.ref), "there" -> Some(probe2.ref) )) val underTest = MembershipHelper(membership, TestProbe().ref) val activeMembers = underTest.getClusterInfo.activeMembers assert(2 === activeMembers.size) assert(activeMembers.contains(probe1.ref)) assert(activeMembers.contains(probe2.ref)) } it("should properly calculate simpleMajority for 0 members") { val membership = Agent(Map[String, Option[ActorRef]]()) val underTest = MembershipHelper(membership, TestProbe().ref) assert(1 === underTest.getClusterInfo.simpleMajority) } it("should properly calculate simpleMajority 
for 1 members") { val membership = Agent(Map[String, Option[ActorRef]]( "1" -> Some(TestProbe().ref) )) val underTest = MembershipHelper(membership, TestProbe().ref) assert(1 === underTest.getClusterInfo.simpleMajority) } it("should properly calculate simpleMajority for 2 members") { val membership = Agent(Map[String, Option[ActorRef]]( "1" -> Some(TestProbe().ref), "2" -> Some(TestProbe().ref) )) val underTest = MembershipHelper(membership, TestProbe().ref) assert(2 === underTest.getClusterInfo.simpleMajority) } it("should properly calculate simpleMajority for 3 members") { val membership = Agent(Map[String, Option[ActorRef]]( "1" -> Some(TestProbe().ref), "2" -> Some(TestProbe().ref), "3" -> Some(TestProbe().ref) )) val underTest = MembershipHelper(membership, TestProbe().ref) assert(2 === underTest.getClusterInfo.simpleMajority) } } } }
Comcast/sirius
src/test/scala/com/comcast/xfinity/sirius/api/impl/membership/MembershipHelperTest.scala
Scala
apache-2.0
5,061
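The spec above pins down the contract of MembershipHelper.getRandomMember: never return the caller, ignore entries whose value is None, and return a Failure when no other member exists. The following is a hypothetical, standard-library-only re-implementation of just that selection rule, shown to make the tested behaviour concrete; it is not the sirius code.

import scala.util.{Failure, Random, Success, Try}

// Hypothetical sketch of the selection rule exercised by MembershipHelperTest:
// pick a random member that is defined and is not the local reference.
object RandomMemberSketch {
  def randomRemote[A](membership: Map[String, Option[A]], local: A): Try[A] = {
    val candidates = membership.values.flatten.filterNot(_ == local).toVector
    if (candidates.isEmpty)
      Failure(new NoSuchElementException("no remote member available"))
    else
      Success(candidates(Random.nextInt(candidates.size)))
  }
}

// With Map("local" -> Some(localRef)) or an empty map this yields a Failure,
// mirroring the failure cases asserted in the spec.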
package io.sphere.json package generic import io.sphere.json.JSON import scala.reflect.macros.blackbox private[generic] object JSONMacros { private def collectKnownSubtypes(c: blackbox.Context)( s: c.universe.Symbol): Set[c.universe.Symbol] = if (s.isModule || s.isModuleClass) Set(s) else if (s.isClass) { val cs = s.asClass if (cs.isCaseClass) Set(cs) else if ((cs.isTrait || cs.isAbstract) && cs.isSealed) cs.knownDirectSubclasses.flatMap(collectKnownSubtypes(c)(_)) else Set.empty } else Set.empty def jsonProductApply(c: blackbox.Context)( tpe: c.universe.Type, classSym: c.universe.ClassSymbol): c.universe.Tree = { import c.universe._ if (classSym.isCaseClass && !classSym.isModuleClass) { val classSymType = classSym.toType val argList = classSymType.member(termNames.CONSTRUCTOR).asMethod.paramLists.head val modifiers = Modifiers(Flag.PARAM) val (argDefs, args) = (for ((a, i) <- argList.zipWithIndex) yield { val argType = classSymType.member(a.name).typeSignatureIn(tpe) val termName = TermName("x" + i) val argTree = ValDef(modifiers, termName, TypeTree(argType), EmptyTree) (argTree, Ident(termName)) }).unzip val applyBlock = Block( Nil, Function( argDefs, Apply(Select(Ident(classSym.companion), TermName("apply")), args) )) Apply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonProduct") ), applyBlock :: Nil ) } else if (classSym.isCaseClass && classSym.isModuleClass) { Apply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonProduct0") ), Ident(classSym.name.toTermName) :: Nil ) } else if (classSym.isModuleClass) { Apply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonSingleton") ), Ident(classSym.name.toTermName) :: Nil ) } else c.abort(c.enclosingPosition, "Not a case class or (case) object") } def deriveSingletonJSON_impl[A: c.WeakTypeTag](c: blackbox.Context): c.Expr[JSON[A]] = { import c.universe._ val tpe = weakTypeOf[A] val symbol = tpe.typeSymbol def singletonTree(classSym: c.universe.ClassSymbol): Tree = if (classSym.isModuleClass) { Apply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonSingleton") ), Ident(classSym.name.toTermName) :: Nil ) } else c.abort(c.enclosingPosition, "Only case Objects are supported.") if (!symbol.isClass) c.abort(c.enclosingPosition, "Can only enumerate values of a sealed trait or class.") else if (!symbol.asClass.isSealed) c.abort(c.enclosingPosition, "Can only enumerate values of a sealed trait or class.") else { val subtypes = collectKnownSubtypes(c)(symbol) val idents = Ident(symbol.name) :: subtypes.map { s => if (s.isModuleClass) TypeTree(s.asClass.toType) else Ident(s.name) }.toList if (idents.size == 1) c.abort(c.enclosingPosition, "Subtypes not found.") else { val instanceDefs = subtypes.zipWithIndex.collect { case (symbol, i) if symbol.isClass && symbol.asClass.isModuleClass => if (symbol.asClass.typeParams.nonEmpty) c.abort( c.enclosingPosition, "Types with type parameters cannot (yet) be derived as part of a sum type") else { ValDef( Modifiers(Flag.IMPLICIT), TermName("json" + i), AppliedTypeTree( Ident(TypeName("JSON")), Ident(symbol) :: Nil ), singletonTree(symbol.asClass) ) } }.toList c.Expr[JSON[A]]( Block( instanceDefs, Apply( TypeApply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonSingletonEnumSwitch") ), idents ), reify(Nil).tree :: Nil ) ) ) } } } def deriveJSON_impl[A: c.WeakTypeTag](c: blackbox.Context): c.Expr[JSON[A]] = { import c.universe._ val tpe = weakTypeOf[A] val symbol = tpe.typeSymbol if (tpe <:< 
weakTypeOf[Enumeration#Value]) { val TypeRef(pre, _, _) = tpe c.Expr[JSON[A]]( Apply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonEnum") ), Ident(pre.typeSymbol.name.toTermName) :: Nil )) } else if (symbol.isClass && (symbol.asClass.isCaseClass || symbol.asClass.isModuleClass)) // product type or singleton c.Expr[JSON[A]](jsonProductApply(c)(tpe, symbol.asClass)) else { // sum type if (!symbol.isClass) c.abort( c.enclosingPosition, "Can only enumerate values of a sealed trait or class." ) else if (!symbol.asClass.isSealed) c.abort( c.enclosingPosition, "Can only enumerate values of a sealed trait or class." ) else { val subtypes = collectKnownSubtypes(c)(symbol) val idents = Ident(symbol.name) :: subtypes.map { s => if (s.isModuleClass) New(TypeTree(s.asClass.toType)) else Ident(s.name) }.toList if (idents.size == 1) c.abort(c.enclosingPosition, "Subtypes not found.") else { val instanceDefs = subtypes.zipWithIndex.collect { case (symbol, i) if symbol.isClass && symbol.asClass.isCaseClass => if (symbol.asClass.typeParams.nonEmpty) { c.abort( c.enclosingPosition, "Types with type parameters cannot (yet) be derived as part of a sum type") } else { ValDef( Modifiers(Flag.IMPLICIT), TermName("json" + i), AppliedTypeTree( Ident(TypeName("JSON")), Ident(symbol) :: Nil ), jsonProductApply(c)(tpe, symbol.asClass) ) } }.toList c.Expr[JSON[A]]( Block( instanceDefs, Apply( TypeApply( Select( reify(io.sphere.json.generic.`package`).tree, TermName("jsonTypeSwitch") ), idents ), reify(Nil).tree :: Nil ) ) ) } } } } }
sphereio/sphere-scala-libs
json/json-derivation/src/main/scala/io/sphere/json/generic/JSONMacros.scala
Scala
apache-2.0
6,911
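collectKnownSubtypes in the macro above relies on knownDirectSubclasses of a sealed class symbol. The same enumeration can be observed with runtime reflection, which makes it easier to see what the macro iterates over; the sketch below (Scala 2.x with scala-reflect on the classpath) is purely illustrative and is not part of the sphere-scala-libs code.

import scala.reflect.runtime.universe._

// Illustrative only: enumerate the direct subtypes of a sealed trait, the same
// information JSONMacros walks at compile time to build its type switch.
object SubtypeSketch {
  sealed trait Shape
  case object Circle extends Shape
  case class Box(w: Int, h: Int) extends Shape

  def main(args: Array[String]): Unit = {
    val subtypes = symbolOf[Shape].asClass.knownDirectSubclasses
    println(subtypes) // expected to show the Circle and Box symbols
  }
}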
package io.mem0r1es.trank.pipeline

import io.mem0r1es.trank.pipeline.NER._
import org.scalatest.FlatSpec

import scala.io.Source

class NERSpec extends FlatSpec {

  "A NER" should "extract entity labels" in {
    val content = Source.fromFile("src/test/resources/exascale.info.txt").mkString
    val entities = runNER(content)
    assert(entities contains ("Switzerland"))
    assert(entities contains ("University of Fribourg"))
    assert(entities contains ("Big Data"))
  }

  it should "not fail with content without Named Entities" in {
    val content = "Just some basic text without any named entities."
    val entities = runNER(content)
    assert(entities isEmpty)
  }

  it should "not fail with empty content" in {
    val entities = runNER("")
    assert(entities isEmpty)
  }
}
homerquan/TRank
src/test/scala/io/mem0r1es/trank/pipeline/NERSpec.scala
Scala
apache-2.0
793
/* * Copyright 2012 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.zipkin.builder import com.twitter.common.zookeeper.ServerSetImpl import com.twitter.finagle.zookeeper.ZookeeperServerSetCluster import com.twitter.logging.Logger import com.twitter.ostrich.admin.RuntimeEnvironment import com.twitter.zipkin.thriftscala import com.twitter.zipkin.query.adjusters.{NullAdjuster, TimeSkewAdjuster, Adjuster} import com.twitter.zipkin.query.ZipkinQuery import com.twitter.zipkin.storage.Store import java.net.InetSocketAddress case class QueryServiceBuilder( storeBuilder: Builder[Store], serverSetPaths: List[(ZooKeeperClientBuilder, String)] = List.empty, serverBuilder: ZipkinServerBuilder = ZipkinServerBuilder(9411, 9901) ) extends Builder[RuntimeEnvironment => ZipkinQuery] { private val adjusterMap: Map[thriftscala.Adjust, Adjuster] = Map ( thriftscala.Adjust.Nothing -> NullAdjuster, thriftscala.Adjust.TimeSkew -> new TimeSkewAdjuster() ) def addServerSetPath(p: (ZooKeeperClientBuilder, String)) = copy(serverSetPaths = serverSetPaths :+ p) def apply(): (RuntimeEnvironment) => ZipkinQuery = (runtime: RuntimeEnvironment) => { val log = Logger.get() serverBuilder.apply().apply(runtime) val address = new InetSocketAddress(serverBuilder.serverAddress, serverBuilder.serverPort) val store = storeBuilder.apply() /* Register server sets */ serverSetPaths foreach { case (zkClientBuilder, path) => log.info("Registering serverset: %s".format(path)) val zkClient = zkClientBuilder.apply() val serverSet = new ServerSetImpl(zkClient, path) val cluster = new ZookeeperServerSetCluster(serverSet) cluster.join(address) } new ZipkinQuery(address, store.storage, store.index, store.aggregates, adjusterMap, serverBuilder.statsReceiver, serverBuilder.tracer) } }
cogitate/twitter-zipkin-uuid
zipkin-query-core/src/main/scala/com/twitter/zipkin/builder/QueryServiceBuilder.scala
Scala
apache-2.0
2,403
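QueryServiceBuilder above follows a copy-based builder style: an immutable case class implementing Builder[A], whose configuration methods return a copy and whose apply() produces the final value. A stripped-down, self-contained sketch of that pattern follows; every name in it is made up for illustration, and the Builder trait here is an assumed stand-in, not zipkin's actual definition.

// Minimal sketch of the copy-based builder pattern used by QueryServiceBuilder.
trait Builder[A] {
  def apply(): A
}

case class ServerConfig(port: Int, paths: List[String])

case class ServerConfigBuilder(port: Int = 9411, paths: List[String] = Nil)
  extends Builder[ServerConfig] {

  // Each "setter" returns a new immutable builder, like addServerSetPath above.
  def addPath(p: String): ServerConfigBuilder = copy(paths = paths :+ p)

  def apply(): ServerConfig = ServerConfig(port, paths)
}

object BuilderSketch {
  def main(args: Array[String]): Unit = {
    val config = ServerConfigBuilder().addPath("/zipkin").addPath("/health").apply()
    println(config)
  }
}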
import scala.of.coq.lang._
import Nat._
import Pairs._
import MoreLists._
import scala.concurrent.Future
import MoreFutures._

object FutureMergeSort {

  def split[A](l: List[A]): (List[A], List[A]) =
    l match {
      case Nil => (Nil, Nil)
      case x :: Nil => (x :: Nil, Nil)
      case x :: y :: xs => {
        val (l1, l2) = split(xs)
        (x :: l1, y :: l2)
      }
    }

  def merge(z: (List[Nat], List[Nat])): List[Nat] = {
    val (l1, l2) = z
    l1 match {
      case Nil => l2
      case x1 :: l1_ => l2 match {
        case Nil => l1
        case x2 :: l2_ =>
          if (x1 <= x2) x1 :: merge((l1_, l2))
          else x2 :: merge((l1, l2_))
      }
    }
  }

  def msort(l: List[Nat])(n: Nat): Future[List[Nat]] =
    (l, n) match {
      case (Nil, _) => future(Nil)
      case (x :: Nil, _) => future(x :: Nil)
      case (x :: y :: _, S(n1)) => {
        val (l1, l2) = split(l)
        fut_flat_map((r1: List[Nat]) =>
          fut_map((r2: List[Nat]) => merge((r1, r2)))(msort(l2)(n1)))(msort(l1)(n1))
      }
      case (_ :: _ :: _, Zero) => future(Nil)
    }

  def mergeSort(l: List[Nat]): Future[List[Nat]] = msort(l)(length(l))
}
JBakouny/Scallina
packaged-examples/snapshot/future-merge-sort/scallina/FutureMergeSort.scala
Scala
gpl-3.0
1,143
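The Scallina output above sorts by splitting the list, sorting both halves inside Futures, and merging the results; Nat, future, fut_map and fut_flat_map come from its runtime support library. For readers without that library, here is a rough plain-Future analogue of the same shape over Ints, written as an illustration rather than as the generated code.

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

// Plain-Scala analogue of FutureMergeSort above: split, sort the halves in
// Futures, then merge, using Int instead of the Coq-derived Nat.
object FutureMergeSortSketch {
  def split[A](l: List[A]): (List[A], List[A]) = l match {
    case Nil      => (Nil, Nil)
    case x :: Nil => (x :: Nil, Nil)
    case x :: y :: xs =>
      val (l1, l2) = split(xs)
      (x :: l1, y :: l2)
  }

  def merge(l1: List[Int], l2: List[Int]): List[Int] = (l1, l2) match {
    case (Nil, _) => l2
    case (_, Nil) => l1
    case (x :: xs, y :: ys) =>
      if (x <= y) x :: merge(xs, l2) else y :: merge(l1, ys)
  }

  def msort(l: List[Int]): Future[List[Int]] = l match {
    case Nil      => Future.successful(Nil)
    case x :: Nil => Future.successful(List(x))
    case _ =>
      val (l1, l2) = split(l)
      val f1 = msort(l1)
      val f2 = msort(l2)
      for (r1 <- f1; r2 <- f2) yield merge(r1, r2)
  }

  def main(args: Array[String]): Unit = {
    println(Await.result(msort(List(5, 3, 8, 1, 4)), 5.seconds)) // List(1, 3, 4, 5, 8)
  }
}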
import org.cdent.World._ World().greet()
ingydotnet/cdent-py
dev-tests/hello-world/hello_world.scala
Scala
bsd-2-clause
42
package io.github.edadma.numbers object Platform { }
edadma/numbers
native/src/main/scala/io/github/edadma/numbers/Platform.scala
Scala
mit
56
package hotpepper4s.raw

import hotpepper4s.{CodeName, Results}

/**
 * Food Category
 *
 * @author ponkotuy
 * date: 2013/12/30
 */
case class FoodCategoryResults(
    private val api_version: String,
    private val results_available: Int,
    private val results_returned: String,
    private val results_start: Int,
    private val food_category: List[FoodCategory]) extends Results[FoodCategory]{
  def apiVersion: String = api_version
  def resultsAvailable: Int = results_available
  def resultsReturned: String = results_returned
  def resultsStart: Int = results_start
  def data: List[FoodCategory] = food_category
  def foodCategory = food_category
}

case class FoodCategory(code: String, name: String) extends CodeName
ponkotuy/hotpepper4s
src/main/scala/hotpepper4s/raw/FoodCategoryResults.scala
Scala
mit
732
/* * Copyright 2013-2015 Websudos, Limited. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Explicit consent must be obtained from the copyright owner, Websudos Limited before any redistribution is made. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ import com.twitter.sbt._ import com.twitter.scrooge.ScroogeSBT import sbt.Keys._ import sbt._ object Build extends Build { val UtilVersion = "0.9.11" val DatastaxDriverVersion = "2.2.0-rc3" val ScalaTestVersion = "2.2.4" val ShapelessVersion = "2.2.4" val FinagleVersion = "6.25.0" val TwitterUtilVersion = "6.24.0" val ScroogeVersion = "3.17.0" val ScalatraVersion = "2.3.0" val PlayVersion = "2.4.0-M1" val Json4SVersion = "3.2.11" val ScalaMeterVersion = "0.6" val SparkCassandraVersion = "1.2.0-alpha3" val ThriftVersion = "0.5.0" val DieselEngineVersion = "0.2.2" val mavenPublishSettings : Seq[Def.Setting[_]] = Seq( credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"), publishMavenStyle := true, publishTo <<= version.apply { v => val nexus = "https://oss.sonatype.org/" if (v.trim.endsWith("SNAPSHOT")) Some("snapshots" at nexus + "content/repositories/snapshots") else Some("releases" at nexus + "service/local/staging/deploy/maven2") }, licenses += ("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0")), publishArtifact in Test := false, pomIncludeRepository := { _ => true }, pomExtra := <url>https://github.com/websudos/phantom</url> <scm> <url>[email protected]:websudos/phantom.git</url> <connection>scm:git:[email protected]:websudos/phantom.git</connection> </scm> <developers> <developer> <id>alexflav</id> <name>Flavian Alexandru</name> <url>http://github.com/alexflav23</url> </developer> </developers> ) def liftVersion(scalaVersion: String): String = { scalaVersion match { case "2.10.5" => "3.0-M1" case _ => "3.0-M2" } } val PerformanceTest = config("perf").extend(Test) def performanceFilter(name: String): Boolean = name endsWith "PerformanceTest" val publishSettings: Seq[Def.Setting[_]] = Seq( publishMavenStyle := true, bintray.BintrayKeys.bintrayOrganization := Some("websudos"), bintray.BintrayKeys.bintrayRepository := "oss-releases", bintray.BintrayKeys.bintrayReleaseOnPublish in ThisBuild := true, publishArtifact in Test := false, pomIncludeRepository := { _ => true}, licenses += ("Apache-2.0", 
url("https://www.apache.org/licenses/LICENSE-2.0")) ) val sharedSettings: Seq[Def.Setting[_]] = Defaults.coreDefaultSettings ++ Seq( organization := "com.websudos", version := "1.12.2", scalaVersion := "2.11.7", crossScalaVersions := Seq("2.10.5", "2.11.7"), resolvers ++= Seq( "Typesafe repository snapshots" at "http://repo.typesafe.com/typesafe/snapshots/", "Typesafe repository releases" at "http://repo.typesafe.com/typesafe/releases/", "Sonatype repo" at "https://oss.sonatype.org/content/groups/scala-tools/", "Sonatype releases" at "https://oss.sonatype.org/content/repositories/releases", "Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots", "Sonatype staging" at "http://oss.sonatype.org/content/repositories/staging", "Java.net Maven2 Repository" at "http://download.java.net/maven/2/", "Twitter Repository" at "http://maven.twttr.com", Resolver.bintrayRepo("websudos", "oss-releases") ), scalacOptions ++= Seq( "-language:postfixOps", "-language:implicitConversions", "-language:reflectiveCalls", "-language:higherKinds", "-language:existentials", "-Yinline-warnings", "-Xlint", "-deprecation", "-feature", "-unchecked" ), fork in Test := false, javaOptions in Test ++= Seq("-Xmx2G"), testFrameworks in PerformanceTest := Seq(new TestFramework("org.scalameter.ScalaMeterFramework")), testOptions in Test := Seq(Tests.Filter(x => !performanceFilter(x))), testOptions in PerformanceTest := Seq(Tests.Filter(x => performanceFilter(x))), fork in PerformanceTest := false ) ++ net.virtualvoid.sbt.graph.Plugin.graphSettings ++ publishSettings ++ VersionManagement.newSettings lazy val phantom = Project( id = "phantom", base = file("."), settings = sharedSettings ).configs( PerformanceTest ).settings( inConfig(PerformanceTest)(Defaults.testTasks): _* ).settings( name := "phantom" ).aggregate( phantomDsl, phantomExample, phantomConnectors, // phantomScalatraTest, phantomTestKit, phantomThrift, phantomUdt, phantomZookeeper ) lazy val phantomDsl = Project( id = "phantom-dsl", base = file("phantom-dsl"), settings = Defaults.coreDefaultSettings ++ sharedSettings ++ publishSettings ).configs( PerformanceTest ).settings( inConfig(PerformanceTest)(Defaults.testTasks): _* ).settings( name := "phantom-dsl", testOptions in Test += Tests.Argument("-oF"), logBuffered in Test := false, concurrentRestrictions in Test := Seq( Tags.limit(Tags.ForkedTestGroup, 4) ), libraryDependencies ++= Seq( "org.scala-lang" % "scala-reflect" % scalaVersion.value, "com.websudos" %% "diesel-engine" % DieselEngineVersion, "com.chuusai" %% "shapeless" % ShapelessVersion, "com.twitter" %% "util-core" % TwitterUtilVersion, "com.typesafe.play" %% "play-iteratees" % "2.4.0-M1", "joda-time" % "joda-time" % "2.3", "org.joda" % "joda-convert" % "1.6", "com.datastax.cassandra" % "cassandra-driver-core" % DatastaxDriverVersion, "org.slf4j" % "slf4j-log4j12" % "1.7.12" % "test, provided", "org.scalacheck" %% "scalacheck" % "1.11.5" % "test, provided", "com.websudos" %% "util-testing" % UtilVersion % "test, provided", "net.liftweb" %% "lift-json" % liftVersion(scalaVersion.value) % "test, provided", "com.storm-enroute" %% "scalameter" % ScalaMeterVersion % "test, provided" ) ).dependsOn( phantomTestKit % "test, provided", phantomConnectors ) lazy val phantomConnectors = Project( id = "phantom-connectors", base = file("phantom-connectors"), settings = sharedSettings ).configs(PerformanceTest).settings( name := "phantom-connectors", libraryDependencies ++= Seq( "com.datastax.cassandra" % "cassandra-driver-core" % 
DatastaxDriverVersion, "com.websudos" %% "util-testing" % UtilVersion % "test, provided" ) ) lazy val phantomUdt = Project( id = "phantom-udt", base = file("phantom-udt"), settings = sharedSettings ).settings( name := "phantom-udt", scalacOptions ++= Seq( "-language:experimental.macros" ), libraryDependencies ++= Seq( "com.websudos" %% "util-testing" % UtilVersion % "test, provided" ) ).dependsOn( phantomDsl, phantomZookeeper, phantomTestKit % "test, provided" ) lazy val phantomThrift = Project( id = "phantom-thrift", base = file("phantom-thrift"), settings = Defaults.coreDefaultSettings ++ sharedSettings ++ publishSettings ++ ScroogeSBT.newSettings ).settings( name := "phantom-thrift", libraryDependencies ++= Seq( "org.apache.thrift" % "libthrift" % ThriftVersion, "com.twitter" %% "scrooge-core" % ScroogeVersion, "com.twitter" %% "scrooge-serializer" % ScroogeVersion, "com.websudos" %% "util-testing" % UtilVersion % "test, provided" ) ).dependsOn( phantomDsl, phantomTestKit % "test, provided" ) lazy val phantomZookeeper = Project( id = "phantom-zookeeper", base = file("phantom-zookeeper"), settings = sharedSettings ).settings( name := "phantom-zookeeper", libraryDependencies ++= Seq( "org.xerial.snappy" % "snappy-java" % "1.1.1.3", "com.websudos" %% "util-testing" % UtilVersion % "test, provided", "com.websudos" %% "util-zookeeper" % UtilVersion % "test, provided" excludeAll ExclusionRule("org.slf4j", "slf4j-jdk14") ) ).dependsOn( phantomConnectors ) lazy val phantomTestKit = Project( id = "phantom-testkit", base = file("phantom-testkit"), settings = sharedSettings ).settings( name := "phantom-testkit", libraryDependencies ++= Seq( "com.twitter" %% "util-core" % TwitterUtilVersion, "com.websudos" %% "util-testing" % UtilVersion ) ).dependsOn( phantomConnectors ) lazy val phantomExample = Project( id = "phantom-example", base = file("phantom-example"), settings = sharedSettings ++ ScroogeSBT.newSettings ).settings( name := "phantom-example" ).dependsOn( phantomDsl, phantomThrift, phantomZookeeper, phantomTestKit ) lazy val phantomScalatraTest = Project( id = "phantom-scalatra-test", base = file("phantom-scalatra-test"), settings = sharedSettings ).settings( name := "phantom-test", fork := false, logBuffered in Test := false, testOptions in Test := Seq(Tests.Filter(s => s.indexOf("IterateeBig") == -1)), concurrentRestrictions in Test := Seq( Tags.limit(Tags.ForkedTestGroup, 4) ) ).settings( libraryDependencies ++= Seq( "org.scalatra" %% "scalatra" % ScalatraVersion, "org.scalatra" %% "scalatra-scalate" % ScalatraVersion, "org.scalatra" %% "scalatra-json" % ScalatraVersion, "org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test", "org.json4s" %% "json4s-jackson" % Json4SVersion, "org.json4s" %% "json4s-ext" % Json4SVersion, "net.databinder.dispatch" %% "dispatch-core" % "0.11.0" % "test", "net.databinder.dispatch" %% "dispatch-json4s-jackson" % "0.11.0" % "test", "org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106", "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "provided;test" artifacts Artifact("javax.servlet", "jar", "jar"), "com.websudos" %% "util-testing" % UtilVersion % "provided" ) ).dependsOn( phantomDsl, phantomThrift, phantomZookeeper, phantomTestKit ) }
analytically/phantom
project/Build.scala
Scala
bsd-2-clause
13,033
package io.surfkit.client import java.util.UUID import io.surfkit.clientlib.webrtc.Peer.PeerInfo import org.scalajs.dom.raw.MouseEvent import scala.scalajs.js import org.scalajs.dom import org.scalajs.dom.{ErrorEvent, CloseEvent, MessageEvent, Event} import org.scalajs.dom.raw.DOMError import scala.scalajs.js import io.surfkit.clientlib.webrtc._ import org.scalajs.dom.experimental.webrtc._ import org.scalajs.dom.experimental.mediastream._ import scala.scalajs.js.annotation.JSExport import scala.scalajs.js.| import scala.concurrent.ExecutionContext.Implicits.global class WebSocketSignaler extends Peer.ModelTransformPeerSignaler[m.RTCSignal]{ val id = (Math.random() * 1000).toInt.toString val `type` = "video" var localPeer = PeerInfo(id, `type`) var ws = new dom.WebSocket(s"ws://${dom.document.location.hostname}:${dom.document.location.port}/ws/${id}") ws.onmessage = { x: MessageEvent => //println(s"WS onmessage ${x.data.toString}") val msg = upickle.default.read[m.Model](x.data.toString) receive(toPeerSignaling(msg.asInstanceOf[m.RTCSignal])) } ws.onopen = { x: Event => println("WS connection connected") } ws.onerror = { x: ErrorEvent => println("some error has occured " + x.message) } ws.onclose = { x: CloseEvent => println("WS connection CLOSED !!") } implicit def modelToPeer(p:m.Signaling.PeerInfo):Peer.PeerInfo = Peer.PeerInfo(p.id, p.`type`) implicit def peerToModel(p:Peer.PeerInfo):m.Signaling.PeerInfo = m.Signaling.PeerInfo(p.id, p.`type`) override def toPeerSignaling(model:m.RTCSignal):Peer.Signaling = model match{ case m.Signaling.Join(r, l,name) => Peer.Join(r, l, name) case m.Signaling.Room(r, l, name, members) => import js.JSConverters._ val peers = members.map(p => Peer.PeerInfo(id = p.id, `type` = p.`type`)) Peer.Room(r, l, name, peers.toJSArray) case m.Signaling.Offer(r, l, offer, room) => //println(s"toPeerSignaling offer ${offer}") Peer.Offer(r, l, new RTCSessionDescription(RTCSessionDescriptionInit(offer.`type`.asInstanceOf[RTCSdpType], offer.sdp)), room) case m.Signaling.Candidate(r, l, c) => Peer.Candidate(r, l, new RTCIceCandidate(RTCIceCandidateInit(c.candidate, c.sdpMid, c.sdpMLineIndex.toDouble))) case m.Signaling.Answer(r, l, answer) => Peer.Answer(r, l, new RTCSessionDescription(RTCSessionDescriptionInit(answer.`type`.asInstanceOf[RTCSdpType], answer.sdp))) case m.Signaling.Error(r, l, error) => Peer.Error(r, l, error) case _ => Peer.Error(Peer.PeerInfo("", ""), Peer.PeerInfo("", ""), "Unknown signaling type") } override def fromPeerSignaling(s:Peer.Signaling):m.RTCSignal = s match{ case Peer.Join(r, l, name) => m.Signaling.Join(r, l, name) case Peer.Room(r, l, name, members) => val peers = members.map(p => m.Signaling.PeerInfo(id = p.id, `type` = p.`type`)) m.Signaling.Room(r, l, name, peers.toSet) case Peer.Answer(r, l, answer) => m.Signaling.Answer(r, l, m.Signaling.RTCSessionDescription(answer.`type`.toString, answer.sdp)) case Peer.Offer(r, l, offer, room) => m.Signaling.Offer(r, l, m.Signaling.RTCSessionDescription(offer.`type`.toString, offer.sdp), room) case Peer.Candidate(r, l, c) => m.Signaling.Candidate(r, l, m.Signaling.RTCIceCandidate(c.candidate, c.sdpMLineIndex.toInt, c.sdpMid)) case Peer.Error(r, l, error) => m.Signaling.Error(r, l, error) case _ => m.Signaling.Error(m.Signaling.PeerInfo("", ""), m.Signaling.PeerInfo("", ""), "Unknown signaling type") } override def sendModel(s:m.RTCSignal) = ws.send(upickle.default.write(s)) } object WebRTCMain extends js.JSApp { def main(): Unit = { val signaler = new WebSocketSignaler val local = 
dom.document.getElementById("local").asInstanceOf[dom.html.Video] val txtPeerId = dom.document.getElementById("peerId") txtPeerId.innerHTML = signaler.id val iceServers: String | js.Array[String] = "turn:turn.conversant.im:443" val rtcConfiguration = RTCConfiguration( iceServers = js.Array[RTCIceServer]( //RTCIceServer(urls = "stun:stun.l.google.com:19302"), //RTCIceServer(urls = "turn:turn.conversant.im:443", username="turnuser", credential = "turnpass") RTCIceServer(urls = iceServers) ) ) val webRTC = new SimpleWebRTC[m.RTCSignal,WebSocketSignaler](signaler, rtcConfiguration) webRTC.peerStreamAdded = { peer => println("TODO: add the remote video to the page") val remoteVideoElm = dom.document.createElement("video").asInstanceOf[dom.html.Video] peer.streams.headOption.foreach{ s => println(s"peerStreamAdded ADDING STREAM ${s}") val remoteDyn = (remoteVideoElm.asInstanceOf[js.Dynamic]) remoteDyn.srcObject = s remoteDyn.play() } dom.document.getElementById("playground").appendChild(remoteVideoElm) } val constraintTrue: Boolean | MediaTrackConstraints = true val bCall = dom.document.getElementById("bCall").asInstanceOf[dom.html.Button] bCall.onclick = { me: MouseEvent => webRTC.startLocalVideo(MediaStreamConstraints(constraintTrue, constraintTrue),local).foreach { s => webRTC.joinRoom("test").foreach { room: Peer.Room => println(s"You have joined the room... ${room.name}") } } } } @JSExport def advanced(): Unit = { println("ADVANCED...") val signaler = new WebSocketSignaler val local = dom.document.getElementById("local").asInstanceOf[dom.html.Video] val txtPeerId = dom.document.getElementById("peerId") txtPeerId.innerHTML = signaler.id val iceServers: String | js.Array[String] = "turn:turn.conversant.im:443" val rtcConfiguration = RTCConfiguration( iceServers = js.Array[RTCIceServer]( //RTCIceServer(urls = "stun:stun.l.google.com:19302"), //RTCIceServer(urls = "turn:turn.conversant.im:443", username="turnuser", credential = "turnpass") RTCIceServer(urls = iceServers) ) ) val webRTC = new SMWebRTC[m.RTCSignal,WebSocketSignaler](signaler, rtcConfiguration) webRTC.peerStreamAdded = { peer => println("TODO: add the remote video to the page") val remoteVideoElm = dom.document.createElement("video").asInstanceOf[dom.html.Video] peer.streams.headOption.foreach{ s => println(s"peerStreamAdded ADDING STREAM ${s}") val remoteDyn = (remoteVideoElm.asInstanceOf[js.Dynamic]) remoteDyn.srcObject = s remoteDyn.play() } dom.document.getElementById("playground").appendChild(remoteVideoElm) } val constraintTrue: Boolean | MediaTrackConstraints = true val bJoin = dom.document.getElementById("bJoin").asInstanceOf[dom.html.Button] bJoin.onclick = { me: MouseEvent => val txtRoom = dom.document.getElementById("room").asInstanceOf[dom.html.Input] webRTC.joinRoom(txtRoom.value) } val bCall = dom.document.getElementById("bCall").asInstanceOf[dom.html.Button] bCall.onclick = { me: MouseEvent => val txtRoom = dom.document.getElementById("room").asInstanceOf[dom.html.Input] webRTC.call(txtRoom.value) } webRTC.onJoinRoom = { (name, peers) => println(s"Joined Room ${name}") println("members ============") peers.foreach(println) println("====================") } webRTC.onRing = { (name, peer) => println("Ring Ring !!!") println(s"Call coming from room ${name}") println("Auto Answer") webRTC.call(name) } } }
coreyauger/scala-webrtc-example
client-webrtc/src/main/scala/io/surfkit/client/WebRTCMain.scala
Scala
mit
7,632
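Most of WebSocketSignaler above is a pair of total translations between the application's m.Signaling messages and the library's Peer.Signaling messages (toPeerSignaling / fromPeerSignaling). That pattern, a bidirectional mapping between two sealed ADTs, can be shown without any WebRTC or Scala.js dependency; the types below are tiny stand-ins invented for illustration.

// Illustrative sketch of the two-way ADT translation done by WebSocketSignaler.
object SignalTranslationSketch {
  // "Wire" side, e.g. what travels over the WebSocket.
  sealed trait WireSignal
  case class WireJoin(room: String)     extends WireSignal
  case class WireError(message: String) extends WireSignal

  // "Peer" side, e.g. what the WebRTC layer consumes.
  sealed trait PeerSignal
  case class PeerJoin(room: String)     extends PeerSignal
  case class PeerError(message: String) extends PeerSignal

  def toPeer(w: WireSignal): PeerSignal = w match {
    case WireJoin(room)     => PeerJoin(room)
    case WireError(message) => PeerError(message)
  }

  def fromPeer(p: PeerSignal): WireSignal = p match {
    case PeerJoin(room)     => WireJoin(room)
    case PeerError(message) => WireError(message)
  }

  def main(args: Array[String]): Unit = {
    val original: WireSignal = WireJoin("test")
    println(fromPeer(toPeer(original)) == original) // true, round-trips cleanly
  }
}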
object ErrorFile { val value = "someVal" def main(args: Array[String]): Unit = { println(value } }
bazelbuild/rules_scala
test_version/test_reporter/ErrorFile.scala
Scala
apache-2.0
110
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import java.text.ParseException import java.time.{DateTimeException, LocalDate, LocalDateTime, ZoneId, ZoneOffset} import java.time.format.DateTimeParseException import java.util.Locale import org.apache.commons.text.StringEscapeUtils import org.apache.spark.SparkDateTimeException import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, FunctionRegistry} import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.catalyst.trees.TreePattern._ import org.apache.spark.sql.catalyst.util.{DateTimeUtils, LegacyDateFormats, TimestampFormatter} import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.catalyst.util.DateTimeUtils._ import org.apache.spark.sql.catalyst.util.LegacyDateFormats.SIMPLE_DATE_FORMAT import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.sql.types.DayTimeIntervalType.DAY import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} /** * Common base class for time zone aware expressions. */ trait TimeZoneAwareExpression extends Expression { /** The expression is only resolved when the time zone has been set. */ override lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess && timeZoneId.isDefined final override val nodePatterns: Seq[TreePattern] = Seq(TIME_ZONE_AWARE_EXPRESSION) ++ nodePatternsInternal // Subclasses can override this function to provide more TreePatterns. def nodePatternsInternal(): Seq[TreePattern] = Seq() /** the timezone ID to be used to evaluate value. */ def timeZoneId: Option[String] /** Returns a copy of this expression with the specified timeZoneId. */ def withTimeZone(timeZoneId: String): TimeZoneAwareExpression @transient lazy val zoneId: ZoneId = DateTimeUtils.getZoneId(timeZoneId.get) def zoneIdForType(dataType: DataType): ZoneId = dataType match { case _: TimestampNTZType => java.time.ZoneOffset.UTC case _ => zoneId } } trait TimestampFormatterHelper extends TimeZoneAwareExpression { protected def formatString: Expression protected def isParsing: Boolean // Whether the timestamp formatter is for TimestampNTZType. // If yes, the formatter is always `Iso8601TimestampFormatter`. 
protected def forTimestampNTZ: Boolean = false @transient final protected lazy val formatterOption: Option[TimestampFormatter] = if (formatString.foldable) { Option(formatString.eval()).map(fmt => getFormatter(fmt.toString)) } else None final protected def getFormatter(fmt: String): TimestampFormatter = { TimestampFormatter( format = fmt, zoneId = zoneId, legacyFormat = SIMPLE_DATE_FORMAT, isParsing = isParsing, forTimestampNTZ = forTimestampNTZ) } } @ExpressionDescription( usage = "_FUNC_() - Returns the current session local timezone.", examples = """ Examples: > SELECT _FUNC_(); Asia/Shanghai """, group = "datetime_funcs", since = "3.1.0") case class CurrentTimeZone() extends LeafExpression with Unevaluable { override def nullable: Boolean = false override def dataType: DataType = StringType override def prettyName: String = "current_timezone" final override val nodePatterns: Seq[TreePattern] = Seq(CURRENT_LIKE) } /** * Returns the current date at the start of query evaluation. * There is no code generation since this expression should get constant folded by the optimizer. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_() - Returns the current date at the start of query evaluation. All calls of current_date within the same query return the same value. _FUNC_ - Returns the current date at the start of query evaluation. """, examples = """ Examples: > SELECT _FUNC_(); 2020-04-25 > SELECT _FUNC_; 2020-04-25 """, note = """ The syntax without braces has been supported since 2.0.1. """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class CurrentDate(timeZoneId: Option[String] = None) extends LeafExpression with TimeZoneAwareExpression with CodegenFallback { def this() = this(None) override def foldable: Boolean = true override def nullable: Boolean = false override def dataType: DataType = DateType final override def nodePatternsInternal(): Seq[TreePattern] = Seq(CURRENT_LIKE) override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override def eval(input: InternalRow): Any = currentDate(zoneId) override def prettyName: String = "current_date" } abstract class CurrentTimestampLike() extends LeafExpression with CodegenFallback { override def foldable: Boolean = true override def nullable: Boolean = false override def dataType: DataType = TimestampType override def eval(input: InternalRow): Any = currentTimestamp() final override val nodePatterns: Seq[TreePattern] = Seq(CURRENT_LIKE) } /** * Returns the current timestamp at the start of query evaluation. * There is no code generation since this expression should get constant folded by the optimizer. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_() - Returns the current timestamp at the start of query evaluation. All calls of current_timestamp within the same query return the same value. _FUNC_ - Returns the current timestamp at the start of query evaluation. """, examples = """ Examples: > SELECT _FUNC_(); 2020-04-25 15:49:11.914 > SELECT _FUNC_; 2020-04-25 15:49:11.914 """, note = """ The syntax without braces has been supported since 2.0.1. 
""", group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class CurrentTimestamp() extends CurrentTimestampLike { override def prettyName: String = "current_timestamp" } @ExpressionDescription( usage = "_FUNC_() - Returns the current timestamp at the start of query evaluation.", examples = """ Examples: > SELECT _FUNC_(); 2020-04-25 15:49:11.914 """, group = "datetime_funcs", since = "1.6.0") case class Now() extends CurrentTimestampLike { override def prettyName: String = "now" } /** * Returns the current timestamp without time zone at the start of query evaluation. * There is no code generation since this expression should get constant folded by the optimizer. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_() - Returns the current timestamp without time zone at the start of query evaluation. All calls of localtimestamp within the same query return the same value. _FUNC_ - Returns the current local date-time at the session time zone at the start of query evaluation. """, examples = """ Examples: > SELECT _FUNC_(); 2020-04-25 15:49:11.914 """, group = "datetime_funcs", since = "3.3.0") case class LocalTimestamp(timeZoneId: Option[String] = None) extends LeafExpression with TimeZoneAwareExpression with CodegenFallback { def this() = this(None) override def foldable: Boolean = true override def nullable: Boolean = false override def dataType: DataType = TimestampNTZType final override def nodePatternsInternal(): Seq[TreePattern] = Seq(CURRENT_LIKE) override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override def eval(input: InternalRow): Any = localDateTimeToMicros(LocalDateTime.now(zoneId)) override def prettyName: String = "localtimestamp" } /** * Expression representing the current batch time, which is used by StreamExecution to * 1. prevent optimizer from pushing this expression below a stateful operator * 2. allow IncrementalExecution to substitute this expression with a Literal(timestamp) * * There is no code generation since this expression should be replaced with a literal. */ case class CurrentBatchTimestamp( timestampMs: Long, dataType: DataType, timeZoneId: Option[String] = None) extends LeafExpression with TimeZoneAwareExpression with Nondeterministic with CodegenFallback { def this(timestampMs: Long, dataType: DataType) = this(timestampMs, dataType, None) override def nullable: Boolean = false override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) final override def nodePatternsInternal(): Seq[TreePattern] = Seq(CURRENT_LIKE) override def prettyName: String = "current_batch_timestamp" override protected def initializeInternal(partitionIndex: Int): Unit = {} /** * Need to return literal value in order to support compile time expression evaluation * e.g., select(current_date()) */ override protected def evalInternal(input: InternalRow): Any = toLiteral.value def toLiteral: Literal = { val timestampUs = millisToMicros(timestampMs) dataType match { case _: TimestampType => Literal(timestampUs, TimestampType) case _: TimestampNTZType => Literal(convertTz(timestampUs, ZoneOffset.UTC, zoneId), TimestampNTZType) case _: DateType => Literal(microsToDays(timestampUs, zoneId), DateType) } } } /** * Adds a number of days to startdate. 
*/ @ExpressionDescription( usage = "_FUNC_(start_date, num_days) - Returns the date that is `num_days` after `start_date`.", examples = """ Examples: > SELECT _FUNC_('2016-07-30', 1); 2016-07-31 """, group = "datetime_funcs", since = "1.5.0") case class DateAdd(startDate: Expression, days: Expression) extends BinaryExpression with ExpectsInputTypes with NullIntolerant { override def left: Expression = startDate override def right: Expression = days override def inputTypes: Seq[AbstractDataType] = Seq(DateType, TypeCollection(IntegerType, ShortType, ByteType)) override def dataType: DataType = DateType override def nullSafeEval(start: Any, d: Any): Any = { start.asInstanceOf[Int] + d.asInstanceOf[Number].intValue() } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { nullSafeCodeGen(ctx, ev, (sd, d) => { s"""${ev.value} = $sd + $d;""" }) } override def prettyName: String = "date_add" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateAdd = copy(startDate = newLeft, days = newRight) } /** * Subtracts a number of days to startdate. */ @ExpressionDescription( usage = "_FUNC_(start_date, num_days) - Returns the date that is `num_days` before `start_date`.", examples = """ Examples: > SELECT _FUNC_('2016-07-30', 1); 2016-07-29 """, group = "datetime_funcs", since = "1.5.0") case class DateSub(startDate: Expression, days: Expression) extends BinaryExpression with ExpectsInputTypes with NullIntolerant { override def left: Expression = startDate override def right: Expression = days override def inputTypes: Seq[AbstractDataType] = Seq(DateType, TypeCollection(IntegerType, ShortType, ByteType)) override def dataType: DataType = DateType override def nullSafeEval(start: Any, d: Any): Any = { start.asInstanceOf[Int] - d.asInstanceOf[Number].intValue() } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { nullSafeCodeGen(ctx, ev, (sd, d) => { s"""${ev.value} = $sd - $d;""" }) } override def prettyName: String = "date_sub" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateSub = copy(startDate = newLeft, days = newRight) } trait GetTimeField extends UnaryExpression with TimeZoneAwareExpression with ImplicitCastInputTypes with NullIntolerant { val func: (Long, ZoneId) => Any val funcName: String @transient protected lazy val zoneIdInEval: ZoneId = zoneIdForType(child.dataType) override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType) override def dataType: DataType = IntegerType override protected def nullSafeEval(timestamp: Any): Any = { func(timestamp.asInstanceOf[Long], zoneIdInEval) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, c => s"$dtu.$funcName($c, $zid)") } } @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the hour component of the string/timestamp.", examples = """ Examples: > SELECT _FUNC_('2009-07-30 12:58:59'); 12 """, group = "datetime_funcs", since = "1.5.0") case class Hour(child: Expression, timeZoneId: Option[String] = None) extends GetTimeField { def this(child: Expression) = this(child, None) override def withTimeZone(timeZoneId: String): Hour = copy(timeZoneId = Option(timeZoneId)) override val func = DateTimeUtils.getHours override val funcName = "getHours" override protected def withNewChildInternal(newChild: Expression): Hour = copy(child = 
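  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // It shows how DateAdd/DateSub above operate on the internal DATE representation
  // (days since 1970-01-01); assumes a catalyst test scope where Literal and eval()
  // are usable directly:
  //
  //   DateAdd(Literal(0, DateType), Literal(1)).eval()   // 1  -> 1970-01-02
  //   DateSub(Literal(0, DateType), Literal(1)).eval()   // -1 -> 1969-12-31
  //
  // Both expressions simply add or subtract the day count as an Int, so no time
  // zone is involved at this level.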
newChild) } @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the minute component of the string/timestamp.", examples = """ Examples: > SELECT _FUNC_('2009-07-30 12:58:59'); 58 """, group = "datetime_funcs", since = "1.5.0") case class Minute(child: Expression, timeZoneId: Option[String] = None) extends GetTimeField { def this(child: Expression) = this(child, None) override def withTimeZone(timeZoneId: String): Minute = copy(timeZoneId = Option(timeZoneId)) override val func = DateTimeUtils.getMinutes override val funcName = "getMinutes" override protected def withNewChildInternal(newChild: Expression): Minute = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the second component of the string/timestamp.", examples = """ Examples: > SELECT _FUNC_('2009-07-30 12:58:59'); 59 """, group = "datetime_funcs", since = "1.5.0") case class Second(child: Expression, timeZoneId: Option[String] = None) extends GetTimeField { def this(child: Expression) = this(child, None) override def withTimeZone(timeZoneId: String): Second = copy(timeZoneId = Option(timeZoneId)) override val func = DateTimeUtils.getSeconds override val funcName = "getSeconds" override protected def withNewChildInternal(newChild: Expression): Second = copy(child = newChild) } case class SecondWithFraction(child: Expression, timeZoneId: Option[String] = None) extends GetTimeField { def this(child: Expression) = this(child, None) // 2 digits for seconds, and 6 digits for the fractional part with microsecond precision. override def dataType: DataType = DecimalType(8, 6) override def withTimeZone(timeZoneId: String): SecondWithFraction = copy(timeZoneId = Option(timeZoneId)) override val func = DateTimeUtils.getSecondsWithFraction override val funcName = "getSecondsWithFraction" override protected def withNewChildInternal(newChild: Expression): SecondWithFraction = copy(child = newChild) } trait GetDateField extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant { val func: Int => Any val funcName: String override def inputTypes: Seq[AbstractDataType] = Seq(DateType) override def dataType: DataType = IntegerType override protected def nullSafeEval(date: Any): Any = { func(date.asInstanceOf[Int]) } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, c => s"$dtu.$funcName($c)") } } @ExpressionDescription( usage = "_FUNC_(date) - Returns the day of year of the date/timestamp.", examples = """ Examples: > SELECT _FUNC_('2016-04-09'); 100 """, group = "datetime_funcs", since = "1.5.0") case class DayOfYear(child: Expression) extends GetDateField { override val func = DateTimeUtils.getDayInYear override val funcName = "getDayInYear" override protected def withNewChildInternal(newChild: Expression): DayOfYear = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(days) - Create date from the number of days since 1970-01-01.", examples = """ Examples: > SELECT _FUNC_(1); 1970-01-02 """, group = "datetime_funcs", since = "3.1.0") case class DateFromUnixDate(child: Expression) extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant { override def inputTypes: Seq[AbstractDataType] = Seq(IntegerType) override def dataType: DataType = DateType override def nullSafeEval(input: Any): Any = input.asInstanceOf[Int] override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = defineCodeGen(ctx, ev, c => c) override def prettyName: 
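  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // GetDateField extractors and DateFromUnixDate work directly on the internal day
  // count; assumes a catalyst test scope:
  //
  //   DayOfYear(Literal(0, DateType)).eval()   // 1, since 1970-01-01 is day 1 of its year
  //   DateFromUnixDate(Literal(1)).eval()      // 1, i.e. the DATE 1970-01-02
  //
  // DateFromUnixDate is effectively an identity on the underlying Int days value,
  // which is why its codegen is just `c => c`.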
String = "date_from_unix_date" override protected def withNewChildInternal(newChild: Expression): DateFromUnixDate = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(date) - Returns the number of days since 1970-01-01.", examples = """ Examples: > SELECT _FUNC_(DATE("1970-01-02")); 1 """, group = "datetime_funcs", since = "3.1.0") case class UnixDate(child: Expression) extends UnaryExpression with ExpectsInputTypes with NullIntolerant { override def inputTypes: Seq[AbstractDataType] = Seq(DateType) override def dataType: DataType = IntegerType override def nullSafeEval(input: Any): Any = input.asInstanceOf[Int] override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = defineCodeGen(ctx, ev, c => c) override def prettyName: String = "unix_date" override protected def withNewChildInternal(newChild: Expression): UnixDate = copy(child = newChild) } abstract class IntegralToTimestampBase extends UnaryExpression with ExpectsInputTypes with NullIntolerant { protected def upScaleFactor: Long override def inputTypes: Seq[AbstractDataType] = Seq(IntegralType) override def dataType: DataType = TimestampType override def nullSafeEval(input: Any): Any = { Math.multiplyExact(input.asInstanceOf[Number].longValue(), upScaleFactor) } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { if (upScaleFactor == 1) { defineCodeGen(ctx, ev, c => c) } else { defineCodeGen(ctx, ev, c => s"java.lang.Math.multiplyExact($c, ${upScaleFactor}L)") } } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(seconds) - Creates timestamp from the number of seconds (can be fractional) since UTC epoch.", examples = """ Examples: > SELECT _FUNC_(1230219000); 2008-12-25 07:30:00 > SELECT _FUNC_(1230219000.123); 2008-12-25 07:30:00.123 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class SecondsToTimestamp(child: Expression) extends UnaryExpression with ExpectsInputTypes with NullIntolerant { override def inputTypes: Seq[AbstractDataType] = Seq(NumericType) override def dataType: DataType = TimestampType override def nullable: Boolean = child.dataType match { case _: FloatType | _: DoubleType => true case _ => child.nullable } @transient private lazy val evalFunc: Any => Any = child.dataType match { case _: IntegralType => input => Math.multiplyExact(input.asInstanceOf[Number].longValue(), MICROS_PER_SECOND) case _: DecimalType => input => val operand = new java.math.BigDecimal(MICROS_PER_SECOND) input.asInstanceOf[Decimal].toJavaBigDecimal.multiply(operand).longValueExact() case _: FloatType => input => val f = input.asInstanceOf[Float] if (f.isNaN || f.isInfinite) null else (f.toDouble * MICROS_PER_SECOND).toLong case _: DoubleType => input => val d = input.asInstanceOf[Double] if (d.isNaN || d.isInfinite) null else (d * MICROS_PER_SECOND).toLong } override def nullSafeEval(input: Any): Any = evalFunc(input) override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = child.dataType match { case _: IntegralType => defineCodeGen(ctx, ev, c => s"java.lang.Math.multiplyExact($c, ${MICROS_PER_SECOND}L)") case _: DecimalType => val operand = s"new java.math.BigDecimal($MICROS_PER_SECOND)" defineCodeGen(ctx, ev, c => s"$c.toJavaBigDecimal().multiply($operand).longValueExact()") case other => val castToDouble = if (other.isInstanceOf[FloatType]) "(double)" else "" nullSafeCodeGen(ctx, ev, c => { val typeStr = CodeGenerator.boxedType(other) s""" |if ($typeStr.isNaN($c) || $typeStr.isInfinite($c)) { | 
${ev.isNull} = true; |} else { | ${ev.value} = (long)($castToDouble$c * $MICROS_PER_SECOND); |} |""".stripMargin }) } override def prettyName: String = "timestamp_seconds" override protected def withNewChildInternal(newChild: Expression): SecondsToTimestamp = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(milliseconds) - Creates timestamp from the number of milliseconds since UTC epoch.", examples = """ Examples: > SELECT _FUNC_(1230219000123); 2008-12-25 07:30:00.123 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class MillisToTimestamp(child: Expression) extends IntegralToTimestampBase { override def upScaleFactor: Long = MICROS_PER_MILLIS override def prettyName: String = "timestamp_millis" override protected def withNewChildInternal(newChild: Expression): MillisToTimestamp = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(microseconds) - Creates timestamp from the number of microseconds since UTC epoch.", examples = """ Examples: > SELECT _FUNC_(1230219000123123); 2008-12-25 07:30:00.123123 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class MicrosToTimestamp(child: Expression) extends IntegralToTimestampBase { override def upScaleFactor: Long = 1L override def prettyName: String = "timestamp_micros" override protected def withNewChildInternal(newChild: Expression): MicrosToTimestamp = copy(child = newChild) } abstract class TimestampToLongBase extends UnaryExpression with ExpectsInputTypes with NullIntolerant { protected def scaleFactor: Long override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType) override def dataType: DataType = LongType override def nullSafeEval(input: Any): Any = { Math.floorDiv(input.asInstanceOf[Number].longValue(), scaleFactor) } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { if (scaleFactor == 1) { defineCodeGen(ctx, ev, c => c) } else { defineCodeGen(ctx, ev, c => s"java.lang.Math.floorDiv($c, ${scaleFactor}L)") } } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the number of seconds since 1970-01-01 00:00:00 UTC. Truncates higher levels of precision.", examples = """ Examples: > SELECT _FUNC_(TIMESTAMP('1970-01-01 00:00:01Z')); 1 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class UnixSeconds(child: Expression) extends TimestampToLongBase { override def scaleFactor: Long = MICROS_PER_SECOND override def prettyName: String = "unix_seconds" override protected def withNewChildInternal(newChild: Expression): UnixSeconds = copy(child = newChild) } // Internal expression used to get the raw UTC timestamp in pandas API on Spark. // This is to work around casting timestamp_ntz to long disallowed by ANSI. case class CastTimestampNTZToLong(child: Expression) extends TimestampToLongBase { override def inputTypes: Seq[AbstractDataType] = Seq(TimestampNTZType) override def scaleFactor: Long = MICROS_PER_SECOND override def prettyName: String = "cast_timestamp_ntz_to_long" override protected def withNewChildInternal(newChild: Expression): CastTimestampNTZToLong = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the number of milliseconds since 1970-01-01 00:00:00 UTC. 
Truncates higher levels of precision.", examples = """ Examples: > SELECT _FUNC_(TIMESTAMP('1970-01-01 00:00:01Z')); 1000 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class UnixMillis(child: Expression) extends TimestampToLongBase { override def scaleFactor: Long = MICROS_PER_MILLIS override def prettyName: String = "unix_millis" override protected def withNewChildInternal(newChild: Expression): UnixMillis = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(timestamp) - Returns the number of microseconds since 1970-01-01 00:00:00 UTC.", examples = """ Examples: > SELECT _FUNC_(TIMESTAMP('1970-01-01 00:00:01Z')); 1000000 """, group = "datetime_funcs", since = "3.1.0") // scalastyle:on line.size.limit case class UnixMicros(child: Expression) extends TimestampToLongBase { override def scaleFactor: Long = 1L override def prettyName: String = "unix_micros" override protected def withNewChildInternal(newChild: Expression): UnixMicros = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(date) - Returns the year component of the date/timestamp.", examples = """ Examples: > SELECT _FUNC_('2016-07-30'); 2016 """, group = "datetime_funcs", since = "1.5.0") case class Year(child: Expression) extends GetDateField { override val func = DateTimeUtils.getYear override val funcName = "getYear" override protected def withNewChildInternal(newChild: Expression): Year = copy(child = newChild) } case class YearOfWeek(child: Expression) extends GetDateField { override val func = DateTimeUtils.getWeekBasedYear override val funcName = "getWeekBasedYear" override protected def withNewChildInternal(newChild: Expression): YearOfWeek = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(date) - Returns the quarter of the year for date, in the range 1 to 4.", examples = """ Examples: > SELECT _FUNC_('2016-08-31'); 3 """, group = "datetime_funcs", since = "1.5.0") case class Quarter(child: Expression) extends GetDateField { override val func = DateTimeUtils.getQuarter override val funcName = "getQuarter" override protected def withNewChildInternal(newChild: Expression): Quarter = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(date) - Returns the month component of the date/timestamp.", examples = """ Examples: > SELECT _FUNC_('2016-07-30'); 7 """, group = "datetime_funcs", since = "1.5.0") case class Month(child: Expression) extends GetDateField { override val func = DateTimeUtils.getMonth override val funcName = "getMonth" override protected def withNewChildInternal(newChild: Expression): Month = copy(child = newChild) } @ExpressionDescription( usage = "_FUNC_(date) - Returns the day of month of the date/timestamp.", examples = """ Examples: > SELECT _FUNC_('2009-07-30'); 30 """, group = "datetime_funcs", since = "1.5.0") case class DayOfMonth(child: Expression) extends GetDateField { override val func = DateTimeUtils.getDayOfMonth override val funcName = "getDayOfMonth" override protected def withNewChildInternal(newChild: Expression): DayOfMonth = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(date) - Returns the day of the week for date/timestamp (1 = Sunday, 2 = Monday, ..., 7 = Saturday).", examples = """ Examples: > SELECT _FUNC_('2009-07-30'); 5 """, group = "datetime_funcs", since = "2.3.0") // scalastyle:on line.size.limit case class DayOfWeek(child: Expression) extends GetDateField { override val func = 
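  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // It contrasts the two day-of-week numbering schemes used by DayOfWeek and WeekDay;
  // assumes a catalyst test scope. 1970-01-01 (internal day 0) was a Thursday:
  //
  //   DayOfWeek(Literal(0, DateType)).eval()  // 5  (1 = Sunday ... 7 = Saturday)
  //   WeekDay(Literal(0, DateType)).eval()    // 3  (0 = Monday ... 6 = Sunday)
  //
  // Both reuse GetDateField and differ only in the DateTimeUtils mapping function.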
DateTimeUtils.getDayOfWeek override val funcName = "getDayOfWeek" override protected def withNewChildInternal(newChild: Expression): DayOfWeek = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(date) - Returns the day of the week for date/timestamp (0 = Monday, 1 = Tuesday, ..., 6 = Sunday).", examples = """ Examples: > SELECT _FUNC_('2009-07-30'); 3 """, group = "datetime_funcs", since = "2.4.0") // scalastyle:on line.size.limit case class WeekDay(child: Expression) extends GetDateField { override val func = DateTimeUtils.getWeekDay override val funcName = "getWeekDay" override protected def withNewChildInternal(newChild: Expression): WeekDay = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(date) - Returns the week of the year of the given date. A week is considered to start on a Monday and week 1 is the first week with >3 days.", examples = """ Examples: > SELECT _FUNC_('2008-02-20'); 8 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class WeekOfYear(child: Expression) extends GetDateField { override val func = DateTimeUtils.getWeekOfYear override val funcName = "getWeekOfYear" override protected def withNewChildInternal(newChild: Expression): WeekOfYear = copy(child = newChild) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(timestamp, fmt) - Converts `timestamp` to a value of string in the format specified by the date format `fmt`.", arguments = """ Arguments: * timestamp - A date/timestamp or string to be converted to the given format. * fmt - Date/time format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. 
""", examples = """ Examples: > SELECT _FUNC_('2016-04-08', 'y'); 2016 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class DateFormatClass(left: Expression, right: Expression, timeZoneId: Option[String] = None) extends BinaryExpression with TimestampFormatterHelper with ImplicitCastInputTypes with NullIntolerant { def this(left: Expression, right: Expression) = this(left, right, None) override def dataType: DataType = StringType override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, StringType) override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override protected def nullSafeEval(timestamp: Any, format: Any): Any = { val formatter = formatterOption.getOrElse(getFormatter(format.toString)) UTF8String.fromString(formatter.format(timestamp.asInstanceOf[Long])) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { formatterOption.map { tf => val timestampFormatter = ctx.addReferenceObj("timestampFormatter", tf) defineCodeGen(ctx, ev, (timestamp, _) => { s"""UTF8String.fromString($timestampFormatter.format($timestamp))""" }) }.getOrElse { val tf = TimestampFormatter.getClass.getName.stripSuffix("$") val ldf = LegacyDateFormats.getClass.getName.stripSuffix("$") val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) defineCodeGen(ctx, ev, (timestamp, format) => { s"""|UTF8String.fromString($tf$$.MODULE$$.apply( | $format.toString(), | $zid, | $ldf$$.MODULE$$.SIMPLE_DATE_FORMAT(), | false) |.format($timestamp))""".stripMargin }) } } override def prettyName: String = "date_format" override protected def formatString: Expression = right override protected def isParsing: Boolean = false override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateFormatClass = copy(left = newLeft, right = newRight) } /** * Converts time string with given pattern. * Deterministic version of [[UnixTimestamp]], must have at least one parameter. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(timeExp[, fmt]) - Returns the UNIX timestamp of the given time.", arguments = """ Arguments: * timeExp - A date/timestamp or string which is returned as a UNIX timestamp. * fmt - Date/time format pattern to follow. Ignored if `timeExp` is not a string. Default value is "yyyy-MM-dd HH:mm:ss". See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. 
""", examples = """ Examples: > SELECT _FUNC_('2016-04-08', 'yyyy-MM-dd'); 1460098800 """, group = "datetime_funcs", since = "1.6.0") // scalastyle:on line.size.limit case class ToUnixTimestamp( timeExp: Expression, format: Expression, timeZoneId: Option[String] = None, failOnError: Boolean = SQLConf.get.ansiEnabled) extends UnixTime { def this(timeExp: Expression, format: Expression) = this(timeExp, format, None, SQLConf.get.ansiEnabled) override def left: Expression = timeExp override def right: Expression = format override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) def this(time: Expression) = { this(time, Literal(TimestampFormatter.defaultPattern)) } override def prettyName: String = "to_unix_timestamp" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): ToUnixTimestamp = copy(timeExp = newLeft, format = newRight) } // scalastyle:off line.size.limit /** * Converts time string with given pattern to Unix time stamp (in seconds), returns null if fail. * See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a>. * Note that hive Language Manual says it returns 0 if fail, but in fact it returns null. * If the second parameter is missing, use "yyyy-MM-dd HH:mm:ss". * If no parameters provided, the first parameter will be current_timestamp. * If the first parameter is a Date or Timestamp instead of String, we will ignore the * second parameter. */ @ExpressionDescription( usage = "_FUNC_([timeExp[, fmt]]) - Returns the UNIX timestamp of current or specified time.", arguments = """ Arguments: * timeExp - A date/timestamp or string. If not provided, this defaults to current time. * fmt - Date/time format pattern to follow. Ignored if `timeExp` is not a string. Default value is "yyyy-MM-dd HH:mm:ss". See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"> Datetime Patterns</a> for valid date and time format patterns. """, examples = """ Examples: > SELECT _FUNC_(); 1476884637 > SELECT _FUNC_('2016-04-08', 'yyyy-MM-dd'); 1460041200 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class UnixTimestamp( timeExp: Expression, format: Expression, timeZoneId: Option[String] = None, failOnError: Boolean = SQLConf.get.ansiEnabled) extends UnixTime { def this(timeExp: Expression, format: Expression) = this(timeExp, format, None, SQLConf.get.ansiEnabled) override def left: Expression = timeExp override def right: Expression = format override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) def this(time: Expression) = { this(time, Literal(TimestampFormatter.defaultPattern)) } def this() = { this(CurrentTimestamp()) } override def prettyName: String = "unix_timestamp" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): UnixTimestamp = copy(timeExp = newLeft, format = newRight) } /** * Gets a timestamp from a string or a date. 
*/ case class GetTimestamp( left: Expression, right: Expression, override val dataType: DataType, timeZoneId: Option[String] = None, failOnError: Boolean = SQLConf.get.ansiEnabled) extends ToTimestamp { override val forTimestampNTZ: Boolean = dataType == TimestampNTZType override protected def downScaleFactor: Long = 1 override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Some(timeZoneId)) override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): Expression = copy(left = newLeft, right = newRight) } /** * Parses a column to a timestamp without time zone based on the supplied format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(timestamp_str[, fmt]) - Parses the `timestamp_str` expression with the `fmt` expression to a timestamp without time zone. Returns null with invalid input. By default, it follows casting rules to a timestamp if the `fmt` is omitted. """, arguments = """ Arguments: * timestamp_str - A string to be parsed to timestamp without time zone. * fmt - Timestamp format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. """, examples = """ Examples: > SELECT _FUNC_('2016-12-31 00:12:00'); 2016-12-31 00:12:00 > SELECT _FUNC_('2016-12-31', 'yyyy-MM-dd'); 2016-12-31 00:00:00 """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit object ParseToTimestampNTZExpressionBuilder extends ExpressionBuilder { override def build(funcName: String, expressions: Seq[Expression]): Expression = { val numArgs = expressions.length if (numArgs == 1 || numArgs == 2) { ParseToTimestamp(expressions(0), expressions.drop(1).lastOption, TimestampNTZType) } else { throw QueryCompilationErrors.invalidFunctionArgumentNumberError(Seq(1, 2), funcName, numArgs) } } } /** * Parses a column to a timestamp with local time zone based on the supplied format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(timestamp_str[, fmt]) - Parses the `timestamp_str` expression with the `fmt` expression to a timestamp with local time zone. Returns null with invalid input. By default, it follows casting rules to a timestamp if the `fmt` is omitted. """, arguments = """ Arguments: * timestamp_str - A string to be parsed to timestamp with local time zone. * fmt - Timestamp format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. """, examples = """ Examples: > SELECT _FUNC_('2016-12-31 00:12:00'); 2016-12-31 00:12:00 > SELECT _FUNC_('2016-12-31', 'yyyy-MM-dd'); 2016-12-31 00:00:00 """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit object ParseToTimestampLTZExpressionBuilder extends ExpressionBuilder { override def build(funcName: String, expressions: Seq[Expression]): Expression = { val numArgs = expressions.length if (numArgs == 1 || numArgs == 2) { ParseToTimestamp(expressions(0), expressions.drop(1).lastOption, TimestampType) } else { throw QueryCompilationErrors.invalidFunctionArgumentNumberError(Seq(1, 2), funcName, numArgs) } } } abstract class ToTimestamp extends BinaryExpression with TimestampFormatterHelper with ExpectsInputTypes { def failOnError: Boolean // The result of the conversion to timestamp is microseconds divided by this factor. 
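  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // For the UnixTime subclasses the factor is MICROS_PER_SECOND, so results are in
  // seconds; assumes a catalyst test scope and an explicitly resolved UTC time zone:
  //
  //   ToUnixTimestamp(Literal("1970-01-01 00:00:10"), Literal("yyyy-MM-dd HH:mm:ss"))
  //     .withTimeZone("UTC")
  //     .eval()                                // 10L
  //
  // Parsing produces microseconds internally; dividing by the factor yields the
  // externally visible unit.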
// For example if the factor is 1000000, the result of the expression is in seconds. protected def downScaleFactor: Long override protected def formatString: Expression = right override protected def isParsing = true override def forTimestampNTZ: Boolean = left.dataType == TimestampNTZType override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, DateType, TimestampType, TimestampNTZType), StringType) override def dataType: DataType = LongType override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true private def isParseError(e: Throwable): Boolean = e match { case _: DateTimeParseException | _: DateTimeException | _: ParseException => true case _ => false } override def eval(input: InternalRow): Any = { val t = left.eval(input) if (t == null) { null } else { left.dataType match { case DateType => daysToMicros(t.asInstanceOf[Int], zoneId) / downScaleFactor case TimestampType | TimestampNTZType => t.asInstanceOf[Long] / downScaleFactor case StringType => val fmt = right.eval(input) if (fmt == null) { null } else { val formatter = formatterOption.getOrElse(getFormatter(fmt.toString)) try { if (forTimestampNTZ) { formatter.parseWithoutTimeZone(t.asInstanceOf[UTF8String].toString) } else { formatter.parse(t.asInstanceOf[UTF8String].toString) / downScaleFactor } } catch { case e: DateTimeParseException if failOnError => throw QueryExecutionErrors.ansiDateTimeParseError(e) case e: DateTimeException if failOnError => throw QueryExecutionErrors.ansiDateTimeError(e) case e: ParseException if failOnError => throw QueryExecutionErrors.ansiParseError(e) case e if isParseError(e) => null } } } } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val javaType = CodeGenerator.javaType(dataType) def parseErrorBranch(method: String): String = if (failOnError) { s"throw QueryExecutionErrors.$method(e);" } else { s"${ev.isNull} = true;" } val parseMethod = if (forTimestampNTZ) { "parseWithoutTimeZone" } else { "parse" } val downScaleCode = if (forTimestampNTZ) { "" } else { s"/ $downScaleFactor" } left.dataType match { case StringType => formatterOption.map { fmt => val df = classOf[TimestampFormatter].getName val formatterName = ctx.addReferenceObj("formatter", fmt, df) nullSafeCodeGen(ctx, ev, (datetimeStr, _) => s""" |try { | ${ev.value} = $formatterName.$parseMethod($datetimeStr.toString()) $downScaleCode; |} catch (java.time.format.DateTimeParseException e) { | ${parseErrorBranch("ansiDateTimeParseError")} |} catch (java.time.DateTimeException e) { | ${parseErrorBranch("ansiDateTimeError")} |} catch (java.text.ParseException e) { | ${parseErrorBranch("ansiParseError")} |} |""".stripMargin) }.getOrElse { val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) val tf = TimestampFormatter.getClass.getName.stripSuffix("$") val ldf = LegacyDateFormats.getClass.getName.stripSuffix("$") val timestampFormatter = ctx.freshName("timestampFormatter") nullSafeCodeGen(ctx, ev, (string, format) => s""" |$tf $timestampFormatter = $tf$$.MODULE$$.apply( | $format.toString(), | $zid, | $ldf$$.MODULE$$.SIMPLE_DATE_FORMAT(), | true); |try { | ${ev.value} = $timestampFormatter.$parseMethod($string.toString()) $downScaleCode; |} catch (java.time.format.DateTimeParseException e) { | ${parseErrorBranch("ansiDateTimeParseError")} |} catch (java.time.DateTimeException e) { | ${parseErrorBranch("ansiDateTimeError")} |} catch (java.text.ParseException e) { | ${parseErrorBranch("ansiParseError")} |} |""".stripMargin) } case TimestampType | 
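          // Illustrative sketch (editor's addition, not part of the upstream Spark source).
          // Timestamp inputs in this branch are simply divided by downScaleFactor, while
          // DATE inputs go through daysToMicros first; assuming a UTC time zone:
          //
          //   ToUnixTimestamp(Literal(1, DateType), Literal("yyyy-MM-dd"))
          //     .withTimeZone("UTC")
          //     .eval()                        // 86400L, i.e. 1970-01-02 in seconds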
TimestampNTZType => val eval1 = left.genCode(ctx) ev.copy(code = code""" ${eval1.code} boolean ${ev.isNull} = ${eval1.isNull}; $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)}; if (!${ev.isNull}) { ${ev.value} = ${eval1.value} / $downScaleFactor; }""") case DateType => val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val eval1 = left.genCode(ctx) ev.copy(code = code""" ${eval1.code} boolean ${ev.isNull} = ${eval1.isNull}; $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)}; if (!${ev.isNull}) { ${ev.value} = $dtu.daysToMicros(${eval1.value}, $zid) / $downScaleFactor; }""") } } } abstract class UnixTime extends ToTimestamp { override val downScaleFactor: Long = MICROS_PER_SECOND } /** * Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string * representing the timestamp of that moment in the current system time zone in the given * format. If the format is missing, using format like "1970-01-01 00:00:00". * Note that Hive Language Manual says it returns 0 if fail, but in fact it returns null. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(unix_time[, fmt]) - Returns `unix_time` in the specified `fmt`.", arguments = """ Arguments: * unix_time - UNIX Timestamp to be converted to the provided format. * fmt - Date/time format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. The 'yyyy-MM-dd HH:mm:ss' pattern is used if omitted. """, examples = """ Examples: > SELECT _FUNC_(0, 'yyyy-MM-dd HH:mm:ss'); 1969-12-31 16:00:00 > SELECT _FUNC_(0); 1969-12-31 16:00:00 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class FromUnixTime(sec: Expression, format: Expression, timeZoneId: Option[String] = None) extends BinaryExpression with TimestampFormatterHelper with ImplicitCastInputTypes with NullIntolerant { def this(sec: Expression, format: Expression) = this(sec, format, None) override def left: Expression = sec override def right: Expression = format override def prettyName: String = "from_unixtime" def this(unix: Expression) = { this(unix, Literal(TimestampFormatter.defaultPattern)) } override def dataType: DataType = StringType override def nullable: Boolean = true override def inputTypes: Seq[AbstractDataType] = Seq(LongType, StringType) override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override def nullSafeEval(seconds: Any, format: Any): Any = { val fmt = formatterOption.getOrElse(getFormatter(format.toString)) UTF8String.fromString(fmt.format(seconds.asInstanceOf[Long] * MICROS_PER_SECOND)) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { formatterOption.map { f => val formatterName = ctx.addReferenceObj("formatter", f) defineCodeGen(ctx, ev, (seconds, _) => s"UTF8String.fromString($formatterName.format($seconds * 1000000L))") }.getOrElse { val tf = TimestampFormatter.getClass.getName.stripSuffix("$") val ldf = LegacyDateFormats.getClass.getName.stripSuffix("$") val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) defineCodeGen(ctx, ev, (seconds, format) => s""" |UTF8String.fromString( | $tf$$.MODULE$$.apply($format.toString(), | $zid, | $ldf$$.MODULE$$.SIMPLE_DATE_FORMAT(), | false).format($seconds * 1000000L)) |""".stripMargin) } } override protected def formatString: Expression 
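  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // It shows the eval path of FromUnixTime; assumes a catalyst test scope and an
  // explicitly resolved UTC time zone (the rendered string depends on the zone):
  //
  //   FromUnixTime(Literal(0L), Literal("yyyy-MM-dd HH:mm:ss"))
  //     .withTimeZone("UTC")
  //     .eval()                                // UTF8String "1970-01-01 00:00:00"
  //
  // The seconds are upscaled to microseconds (* MICROS_PER_SECOND) before formatting.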
= format override protected def isParsing: Boolean = false override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): FromUnixTime = copy(sec = newLeft, format = newRight) } /** * Returns the last day of the month which the date belongs to. */ @ExpressionDescription( usage = "_FUNC_(date) - Returns the last day of the month which the date belongs to.", examples = """ Examples: > SELECT _FUNC_('2009-01-12'); 2009-01-31 """, group = "datetime_funcs", since = "1.5.0") case class LastDay(startDate: Expression) extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant { override def child: Expression = startDate override def inputTypes: Seq[AbstractDataType] = Seq(DateType) override def dataType: DataType = DateType override def nullSafeEval(date: Any): Any = { DateTimeUtils.getLastDayOfMonth(date.asInstanceOf[Int]) } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, sd => s"$dtu.getLastDayOfMonth($sd)") } override def prettyName: String = "last_day" override protected def withNewChildInternal(newChild: Expression): LastDay = copy(startDate = newChild) } /** * Returns the first date which is later than startDate and named as dayOfWeek. * For example, NextDay(2015-07-27, Sunday) would return 2015-08-02, which is the first * Sunday later than 2015-07-27. * * Allowed "dayOfWeek" is defined in [[DateTimeUtils.getDayOfWeekFromString]]. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """_FUNC_(start_date, day_of_week) - Returns the first date which is later than `start_date` and named as indicated. The function returns NULL if at least one of the input parameters is NULL. When both of the input parameters are not NULL and day_of_week is an invalid input, the function throws IllegalArgumentException if `spark.sql.ansi.enabled` is set to true, otherwise NULL. 
""", examples = """ Examples: > SELECT _FUNC_('2015-01-14', 'TU'); 2015-01-20 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class NextDay( startDate: Expression, dayOfWeek: Expression, failOnError: Boolean = SQLConf.get.ansiEnabled) extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant { override def left: Expression = startDate override def right: Expression = dayOfWeek def this(left: Expression, right: Expression) = this(left, right, SQLConf.get.ansiEnabled) override def inputTypes: Seq[AbstractDataType] = Seq(DateType, StringType) override def dataType: DataType = DateType override def nullable: Boolean = true override def nullSafeEval(start: Any, dayOfW: Any): Any = { try { val dow = DateTimeUtils.getDayOfWeekFromString(dayOfW.asInstanceOf[UTF8String]) val sd = start.asInstanceOf[Int] DateTimeUtils.getNextDateForDayOfWeek(sd, dow) } catch { case e: IllegalArgumentException => if (failOnError) { throw QueryExecutionErrors.ansiIllegalArgumentError(e) } else { null } } } private def dateTimeUtilClass: String = DateTimeUtils.getClass.getName.stripSuffix("$") private def nextDayGenCode( ev: ExprCode, dayOfWeekTerm: String, sd: String, dowS: String): String = { val failOnErrorBranch = if (failOnError) { "throw QueryExecutionErrors.ansiIllegalArgumentError(e);" } else { s"${ev.isNull} = true;" } s""" |try { | int $dayOfWeekTerm = $dateTimeUtilClass.getDayOfWeekFromString($dowS); | ${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekTerm); |} catch (IllegalArgumentException e) { | $failOnErrorBranch |} |""".stripMargin } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { nullSafeCodeGen(ctx, ev, (sd, dowS) => { val dayOfWeekTerm = ctx.freshName("dayOfWeek") if (dayOfWeek.foldable) { val input = dayOfWeek.eval().asInstanceOf[UTF8String] if (input eq null) { s"""${ev.isNull} = true;""" } else { try { val dayOfWeekValue = DateTimeUtils.getDayOfWeekFromString(input) s"${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekValue);" } catch { case _: IllegalArgumentException => nextDayGenCode(ev, dayOfWeekTerm, sd, dowS) } } } else { nextDayGenCode(ev, dayOfWeekTerm, sd, dowS) } }) } override def prettyName: String = "next_day" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): NextDay = copy(startDate = newLeft, dayOfWeek = newRight) } /** * Adds an interval to timestamp. 
*/ case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[String] = None) extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes with NullIntolerant { def this(start: Expression, interval: Expression) = this(start, interval, None) override def left: Expression = start override def right: Expression = interval override def toString: String = s"$left + $right" override def sql: String = s"${left.sql} + ${right.sql}" override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType, TypeCollection(CalendarIntervalType, DayTimeIntervalType)) override def dataType: DataType = start.dataType override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(left.dataType) override def nullSafeEval(start: Any, interval: Any): Any = right.dataType match { case _: DayTimeIntervalType => timestampAddDayTime(start.asInstanceOf[Long], interval.asInstanceOf[Long], zoneIdInEval) case CalendarIntervalType => val i = interval.asInstanceOf[CalendarInterval] timestampAddInterval(start.asInstanceOf[Long], i.months, i.days, i.microseconds, zoneIdInEval) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") interval.dataType match { case _: DayTimeIntervalType => defineCodeGen(ctx, ev, (sd, dt) => s"""$dtu.timestampAddDayTime($sd, $dt, $zid)""") case CalendarIntervalType => defineCodeGen(ctx, ev, (sd, i) => { s"""$dtu.timestampAddInterval($sd, $i.months, $i.days, $i.microseconds, $zid)""" }) } } override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): TimeAdd = copy(start = newLeft, interval = newRight) } /** * Subtract an interval from timestamp or date, which is only used to give a pretty sql string * for `datetime - interval` operations */ case class DatetimeSub( start: Expression, interval: Expression, replacement: Expression) extends RuntimeReplaceable with InheritAnalysisRules { override def parameters: Seq[Expression] = Seq(start, interval) override def makeSQLString(childrenSQL: Seq[String]): String = { childrenSQL.mkString(" - ") } override def toString: String = s"$start - $interval" override protected def withNewChildInternal(newChild: Expression): Expression = { copy(replacement = newChild) } } /** * Adds date and an interval. * * When ansi mode is on, the microseconds part of interval needs to be 0, otherwise a runtime * [[IllegalArgumentException]] will be raised. * When ansi mode is off, if the microseconds part of interval is 0, we perform date + interval * for better performance. if the microseconds part is not 0, then the date will be converted to a * timestamp to add with the whole interval parts. 
*/ case class DateAddInterval( start: Expression, interval: Expression, timeZoneId: Option[String] = None, ansiEnabled: Boolean = SQLConf.get.ansiEnabled) extends BinaryExpression with ExpectsInputTypes with TimeZoneAwareExpression with NullIntolerant { override def left: Expression = start override def right: Expression = interval override def toString: String = s"$left + $right" override def sql: String = s"${left.sql} + ${right.sql}" override def inputTypes: Seq[AbstractDataType] = Seq(DateType, CalendarIntervalType) override def dataType: DataType = DateType override def nullSafeEval(start: Any, interval: Any): Any = { val itvl = interval.asInstanceOf[CalendarInterval] if (ansiEnabled || itvl.microseconds == 0) { DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl) } else { val startTs = DateTimeUtils.daysToMicros(start.asInstanceOf[Int], zoneId) val resultTs = DateTimeUtils.timestampAddInterval( startTs, itvl.months, itvl.days, itvl.microseconds, zoneId) DateTimeUtils.microsToDays(resultTs, zoneId) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") nullSafeCodeGen(ctx, ev, (sd, i) => if (ansiEnabled) { s"""${ev.value} = $dtu.dateAddInterval($sd, $i);""" } else { val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) val startTs = ctx.freshName("startTs") val resultTs = ctx.freshName("resultTs") s""" |if ($i.microseconds == 0) { | ${ev.value} = $dtu.dateAddInterval($sd, $i); |} else { | long $startTs = $dtu.daysToMicros($sd, $zid); | long $resultTs = | $dtu.timestampAddInterval($startTs, $i.months, $i.days, $i.microseconds, $zid); | ${ev.value} = $dtu.microsToDays($resultTs, $zid); |} |""".stripMargin }) } override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateAddInterval = copy(start = newLeft, interval = newRight) } sealed trait UTCTimestamp extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant { val func: (Long, String) => Long val funcName: String override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, StringType) override def dataType: DataType = TimestampType override def nullSafeEval(time: Any, timezone: Any): Any = { func(time.asInstanceOf[Long], timezone.asInstanceOf[UTF8String].toString) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") if (right.foldable) { val tz = right.eval().asInstanceOf[UTF8String] if (tz == null) { ev.copy(code = code""" |boolean ${ev.isNull} = true; |long ${ev.value} = 0; """.stripMargin) } else { val tzClass = classOf[ZoneId].getName val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val escapedTz = StringEscapeUtils.escapeJava(tz.toString) val tzTerm = ctx.addMutableState(tzClass, "tz", v => s"""$v = $dtu.getZoneId("$escapedTz");""") val utcTerm = "java.time.ZoneOffset.UTC" val (fromTz, toTz) = this match { case _: FromUTCTimestamp => (utcTerm, tzTerm) case _: ToUTCTimestamp => (tzTerm, utcTerm) } val eval = left.genCode(ctx) ev.copy(code = code""" |${eval.code} |boolean ${ev.isNull} = ${eval.isNull}; |long ${ev.value} = 0; |if (!${ev.isNull}) { | ${ev.value} = $dtu.convertTz(${eval.value}, $fromTz, $toTz); |} """.stripMargin) } } else { defineCodeGen(ctx, ev, (timestamp, format) => { s"""$dtu.$funcName($timestamp, $format.toString())""" }) } } } /** * This is a common 
function for databases supporting TIMESTAMP WITHOUT TIMEZONE. This function
 * takes a timestamp, which is timezone-agnostic, interprets it as a timestamp in UTC, and
 * renders that timestamp as a timestamp in the given time zone.
 *
 * However, a timestamp in Spark represents the number of microseconds from the Unix epoch,
 * which is not timezone-agnostic. So in Spark this function just shifts the timestamp value
 * from the UTC time zone to the given time zone.
 *
 * This function may return a confusing result if the input is a string with a timezone, e.g.
 * '2018-03-13T06:18:23+00:00'. The reason is that Spark first casts the string to a timestamp
 * according to the timezone in the string, and finally displays the result by converting the
 * timestamp to a string according to the session local timezone.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(timestamp, timezone) - Given a timestamp like '2017-07-14 02:40:00.0', interprets it as a time in UTC, and renders that time as a timestamp in the given time zone. For example, 'GMT+1' would yield '2017-07-14 03:40:00.0'.",
  examples = """
    Examples:
      > SELECT _FUNC_('2016-08-31', 'Asia/Seoul');
       2016-08-31 09:00:00
  """,
  group = "datetime_funcs",
  since = "1.5.0")
// scalastyle:on line.size.limit
case class FromUTCTimestamp(left: Expression, right: Expression) extends UTCTimestamp {
  override val func = DateTimeUtils.fromUTCTime
  override val funcName: String = "fromUTCTime"
  override val prettyName: String = "from_utc_timestamp"
  override protected def withNewChildrenInternal(
      newLeft: Expression, newRight: Expression): FromUTCTimestamp =
    copy(left = newLeft, right = newRight)
}

/**
 * This is a common function for databases supporting TIMESTAMP WITHOUT TIMEZONE. This function
 * takes a timestamp, which is timezone-agnostic, interprets it as a timestamp in the given
 * time zone, and renders that timestamp as a timestamp in UTC.
 *
 * However, a timestamp in Spark represents the number of microseconds from the Unix epoch,
 * which is not timezone-agnostic. So in Spark this function just shifts the timestamp value
 * from the given time zone to the UTC time zone.
 *
 * This function may return a confusing result if the input is a string with a timezone, e.g.
 * '2018-03-13T06:18:23+00:00'. The reason is that Spark first casts the string to a timestamp
 * according to the timezone in the string, and finally displays the result by converting the
 * timestamp to a string according to the session local timezone.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(timestamp, timezone) - Given a timestamp like '2017-07-14 02:40:00.0', interprets it as a time in the given time zone, and renders that time as a timestamp in UTC.
For example, 'GMT+1' would yield '2017-07-14 01:40:00.0'.", examples = """ Examples: > SELECT _FUNC_('2016-08-31', 'Asia/Seoul'); 2016-08-30 15:00:00 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class ToUTCTimestamp(left: Expression, right: Expression) extends UTCTimestamp { override val func = DateTimeUtils.toUTCTime override val funcName: String = "toUTCTime" override val prettyName: String = "to_utc_timestamp" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): ToUTCTimestamp = copy(left = newLeft, right = newRight) } abstract class AddMonthsBase extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant { override def dataType: DataType = DateType override def nullSafeEval(start: Any, months: Any): Any = { DateTimeUtils.dateAddMonths(start.asInstanceOf[Int], months.asInstanceOf[Int]) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (sd, m) => { s"""$dtu.dateAddMonths($sd, $m)""" }) } } /** * Returns the date that is num_months after start_date. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(start_date, num_months) - Returns the date that is `num_months` after `start_date`.", examples = """ Examples: > SELECT _FUNC_('2016-08-31', 1); 2016-09-30 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class AddMonths(startDate: Expression, numMonths: Expression) extends AddMonthsBase { override def left: Expression = startDate override def right: Expression = numMonths override def inputTypes: Seq[AbstractDataType] = Seq(DateType, IntegerType) override def prettyName: String = "add_months" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): AddMonths = copy(startDate = newLeft, numMonths = newRight) } // Adds the year-month interval to the date case class DateAddYMInterval(date: Expression, interval: Expression) extends AddMonthsBase { override def left: Expression = date override def right: Expression = interval override def inputTypes: Seq[AbstractDataType] = Seq(DateType, YearMonthIntervalType) override def toString: String = s"$left + $right" override def sql: String = s"${left.sql} + ${right.sql}" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateAddYMInterval = copy(date = newLeft, interval = newRight) } // Adds the year-month interval to the timestamp case class TimestampAddYMInterval( timestamp: Expression, interval: Expression, timeZoneId: Option[String] = None) extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes with NullIntolerant { def this(timestamp: Expression, interval: Expression) = this(timestamp, interval, None) override def left: Expression = timestamp override def right: Expression = interval override def toString: String = s"$left + $right" override def sql: String = s"${left.sql} + ${right.sql}" override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType, YearMonthIntervalType) override def dataType: DataType = timestamp.dataType override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(left.dataType) override def nullSafeEval(micros: Any, months: Any): Any = { timestampAddMonths(micros.asInstanceOf[Long], months.asInstanceOf[Int], zoneIdInEval) } override def doGenCode(ctx: 
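  // Illustrative sketch (editor's addition, not part of the upstream Spark source).
  // It shows the month-based arithmetic shared by AddMonths and DateAddYMInterval;
  // assumes a catalyst test scope (dates are internal day counts, no time zone involved):
  //
  //   AddMonths(Literal(0, DateType), Literal(1)).eval()   // 31 -> 1970-02-01
  //
  // DateAddYMInterval reuses the same dateAddMonths helper, but takes the month count
  // from a year-month interval value instead of an integer column.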
CodegenContext, ev: ExprCode): ExprCode = { val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (micros, months) => { s"""$dtu.timestampAddMonths($micros, $months, $zid)""" }) } override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): TimestampAddYMInterval = copy(timestamp = newLeft, interval = newRight) } /** * Returns number of months between times `timestamp1` and `timestamp2`. * If `timestamp1` is later than `timestamp2`, then the result is positive. * If `timestamp1` and `timestamp2` are on the same day of month, or both * are the last day of month, time of day will be ignored. Otherwise, the * difference is calculated based on 31 days per month, and rounded to * 8 digits unless roundOff=false. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(timestamp1, timestamp2[, roundOff]) - If `timestamp1` is later than `timestamp2`, then the result is positive. If `timestamp1` and `timestamp2` are on the same day of month, or both are the last day of month, time of day will be ignored. Otherwise, the difference is calculated based on 31 days per month, and rounded to 8 digits unless roundOff=false. """, examples = """ Examples: > SELECT _FUNC_('1997-02-28 10:30:00', '1996-10-30'); 3.94959677 > SELECT _FUNC_('1997-02-28 10:30:00', '1996-10-30', false); 3.9495967741935485 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class MonthsBetween( date1: Expression, date2: Expression, roundOff: Expression, timeZoneId: Option[String] = None) extends TernaryExpression with TimeZoneAwareExpression with ImplicitCastInputTypes with NullIntolerant { def this(date1: Expression, date2: Expression) = this(date1, date2, Literal.TrueLiteral, None) def this(date1: Expression, date2: Expression, roundOff: Expression) = this(date1, date2, roundOff, None) override def first: Expression = date1 override def second: Expression = date2 override def third: Expression = roundOff override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, TimestampType, BooleanType) override def dataType: DataType = DoubleType override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) override def nullSafeEval(t1: Any, t2: Any, roundOff: Any): Any = { DateTimeUtils.monthsBetween( t1.asInstanceOf[Long], t2.asInstanceOf[Long], roundOff.asInstanceOf[Boolean], zoneId) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (d1, d2, roundOff) => { s"""$dtu.monthsBetween($d1, $d2, $roundOff, $zid)""" }) } override def prettyName: String = "months_between" override protected def withNewChildrenInternal( newFirst: Expression, newSecond: Expression, newThird: Expression): MonthsBetween = copy(date1 = newFirst, date2 = newSecond, roundOff = newThird) } /** * Parses a column to a date based on the given format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(date_str[, fmt]) - Parses the `date_str` expression with the `fmt` expression to a date. Returns null with invalid input. By default, it follows casting rules to a date if the `fmt` is omitted. """, arguments = """ Arguments: * date_str - A string to be parsed to date. * fmt - Date format pattern to follow. 
See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. """, examples = """ Examples: > SELECT _FUNC_('2009-07-30 04:17:52'); 2009-07-30 > SELECT _FUNC_('2016-12-31', 'yyyy-MM-dd'); 2016-12-31 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class ParseToDate( left: Expression, format: Option[Expression], timeZoneId: Option[String] = None) extends RuntimeReplaceable with ImplicitCastInputTypes with TimeZoneAwareExpression { override lazy val replacement: Expression = format.map { f => Cast(GetTimestamp(left, f, TimestampType, timeZoneId), DateType, timeZoneId) }.getOrElse(Cast(left, DateType, timeZoneId)) // backwards compatibility def this(left: Expression, format: Expression) = { this(left, Option(format)) } def this(left: Expression) = { this(left, None) } override def prettyName: String = "to_date" override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Some(timeZoneId)) override def nodePatternsInternal(): Seq[TreePattern] = Seq(RUNTIME_REPLACEABLE) override def children: Seq[Expression] = left +: format.toSeq override def inputTypes: Seq[AbstractDataType] = { // Note: ideally this function should only take string input, but we allow more types here to // be backward compatible. TypeCollection(StringType, DateType, TimestampType, TimestampNTZType) +: format.map(_ => StringType).toSeq } override protected def withNewChildrenInternal( newChildren: IndexedSeq[Expression]): Expression = { if (format.isDefined) { copy(left = newChildren.head, format = Some(newChildren.last)) } else { copy(left = newChildren.head) } } } /** * Parses a column to a timestamp based on the supplied format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(timestamp_str[, fmt]) - Parses the `timestamp_str` expression with the `fmt` expression to a timestamp. Returns null with invalid input. By default, it follows casting rules to a timestamp if the `fmt` is omitted. The result data type is consistent with the value of configuration `spark.sql.timestampType`. """, arguments = """ Arguments: * timestamp_str - A string to be parsed to timestamp. * fmt - Timestamp format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid date and time format patterns. 
""", examples = """ Examples: > SELECT _FUNC_('2016-12-31 00:12:00'); 2016-12-31 00:12:00 > SELECT _FUNC_('2016-12-31', 'yyyy-MM-dd'); 2016-12-31 00:00:00 """, group = "datetime_funcs", since = "2.2.0") // scalastyle:on line.size.limit case class ParseToTimestamp( left: Expression, format: Option[Expression], override val dataType: DataType, timeZoneId: Option[String] = None) extends RuntimeReplaceable with ImplicitCastInputTypes with TimeZoneAwareExpression { override lazy val replacement: Expression = format.map { f => GetTimestamp(left, f, dataType, timeZoneId) }.getOrElse(Cast(left, dataType, timeZoneId)) def this(left: Expression, format: Expression) = { this(left, Option(format), SQLConf.get.timestampType) } def this(left: Expression) = this(left, None, SQLConf.get.timestampType) override def nodeName: String = "to_timestamp" override def nodePatternsInternal(): Seq[TreePattern] = Seq(RUNTIME_REPLACEABLE) override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Some(timeZoneId)) override def children: Seq[Expression] = left +: format.toSeq override def inputTypes: Seq[AbstractDataType] = { // Note: ideally this function should only take string input, but we allow more types here to // be backward compatible. TypeCollection(StringType, DateType, TimestampType, TimestampNTZType) +: format.map(_ => StringType).toSeq } override protected def withNewChildrenInternal( newChildren: IndexedSeq[Expression]): Expression = { if (format.isDefined) { copy(left = newChildren.head, format = Some(newChildren.last)) } else { copy(left = newChildren.head) } } } trait TruncInstant extends BinaryExpression with ImplicitCastInputTypes { val instant: Expression val format: Expression override def nullable: Boolean = true private lazy val truncLevel: Int = DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String]) /** * @param input internalRow (time) * @param minLevel Minimum level that can be used for truncation (e.g WEEK for Date input) * @param truncFunc function: (time, level) => time */ protected def evalHelper(input: InternalRow, minLevel: Int)( truncFunc: (Any, Int) => Any): Any = { val level = if (format.foldable) { truncLevel } else { DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String]) } if (level < minLevel) { // unknown format or too small level null } else { val t = instant.eval(input) if (t == null) { null } else { truncFunc(t, level) } } } protected def codeGenHelper( ctx: CodegenContext, ev: ExprCode, minLevel: Int, orderReversed: Boolean = false)( truncFunc: (String, String) => String) : ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val javaType = CodeGenerator.javaType(dataType) if (format.foldable) { if (truncLevel < minLevel) { ev.copy(code = code""" boolean ${ev.isNull} = true; $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};""") } else { val t = instant.genCode(ctx) val truncFuncStr = truncFunc(t.value, truncLevel.toString) ev.copy(code = code""" ${t.code} boolean ${ev.isNull} = ${t.isNull}; $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)}; if (!${ev.isNull}) { ${ev.value} = $dtu.$truncFuncStr; }""") } } else { nullSafeCodeGen(ctx, ev, (left, right) => { val form = ctx.freshName("form") val (dateVal, fmt) = if (orderReversed) { (right, left) } else { (left, right) } val truncFuncStr = truncFunc(dateVal, form) s""" int $form = $dtu.parseTruncLevel($fmt); if ($form < $minLevel) { ${ev.isNull} = true; } else { ${ev.value} = $dtu.$truncFuncStr } """ }) } } } /** * Returns 
date truncated to the unit specified by the format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(date, fmt) - Returns `date` with the time portion of the day truncated to the unit specified by the format model `fmt`. """, arguments = """ Arguments: * date - date value or valid date string * fmt - the format representing the unit to be truncated to - "YEAR", "YYYY", "YY" - truncate to the first date of the year that the `date` falls in - "QUARTER" - truncate to the first date of the quarter that the `date` falls in - "MONTH", "MM", "MON" - truncate to the first date of the month that the `date` falls in - "WEEK" - truncate to the Monday of the week that the `date` falls in """, examples = """ Examples: > SELECT _FUNC_('2019-08-04', 'week'); 2019-07-29 > SELECT _FUNC_('2019-08-04', 'quarter'); 2019-07-01 > SELECT _FUNC_('2009-02-12', 'MM'); 2009-02-01 > SELECT _FUNC_('2015-10-27', 'YEAR'); 2015-01-01 """, group = "datetime_funcs", since = "1.5.0") // scalastyle:on line.size.limit case class TruncDate(date: Expression, format: Expression) extends TruncInstant { override def left: Expression = date override def right: Expression = format override def inputTypes: Seq[AbstractDataType] = Seq(DateType, StringType) override def dataType: DataType = DateType override def prettyName: String = "trunc" override val instant = date override def eval(input: InternalRow): Any = { evalHelper(input, minLevel = MIN_LEVEL_OF_DATE_TRUNC) { (d: Any, level: Int) => DateTimeUtils.truncDate(d.asInstanceOf[Int], level) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { codeGenHelper(ctx, ev, minLevel = MIN_LEVEL_OF_DATE_TRUNC) { (date: String, fmt: String) => s"truncDate($date, $fmt);" } } override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): TruncDate = copy(date = newLeft, format = newRight) } /** * Returns timestamp truncated to the unit specified by the format. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(fmt, ts) - Returns timestamp `ts` truncated to the unit specified by the format model `fmt`. 
""", arguments = """ Arguments: * fmt - the format representing the unit to be truncated to - "YEAR", "YYYY", "YY" - truncate to the first date of the year that the `ts` falls in, the time part will be zero out - "QUARTER" - truncate to the first date of the quarter that the `ts` falls in, the time part will be zero out - "MONTH", "MM", "MON" - truncate to the first date of the month that the `ts` falls in, the time part will be zero out - "WEEK" - truncate to the Monday of the week that the `ts` falls in, the time part will be zero out - "DAY", "DD" - zero out the time part - "HOUR" - zero out the minute and second with fraction part - "MINUTE"- zero out the second with fraction part - "SECOND" - zero out the second fraction part - "MILLISECOND" - zero out the microseconds - "MICROSECOND" - everything remains * ts - datetime value or valid timestamp string """, examples = """ Examples: > SELECT _FUNC_('YEAR', '2015-03-05T09:32:05.359'); 2015-01-01 00:00:00 > SELECT _FUNC_('MM', '2015-03-05T09:32:05.359'); 2015-03-01 00:00:00 > SELECT _FUNC_('DD', '2015-03-05T09:32:05.359'); 2015-03-05 00:00:00 > SELECT _FUNC_('HOUR', '2015-03-05T09:32:05.359'); 2015-03-05 09:00:00 > SELECT _FUNC_('MILLISECOND', '2015-03-05T09:32:05.123456'); 2015-03-05 09:32:05.123 """, group = "datetime_funcs", since = "2.3.0") // scalastyle:on line.size.limit case class TruncTimestamp( format: Expression, timestamp: Expression, timeZoneId: Option[String] = None) extends TruncInstant with TimeZoneAwareExpression { override def left: Expression = format override def right: Expression = timestamp override def inputTypes: Seq[AbstractDataType] = Seq(StringType, TimestampType) override def dataType: TimestampType = TimestampType override def prettyName: String = "date_trunc" override val instant = timestamp override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) def this(format: Expression, timestamp: Expression) = this(format, timestamp, None) override def eval(input: InternalRow): Any = { evalHelper(input, minLevel = MIN_LEVEL_OF_TIMESTAMP_TRUNC) { (t: Any, level: Int) => DateTimeUtils.truncTimestamp(t.asInstanceOf[Long], level, zoneId) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) codeGenHelper(ctx, ev, minLevel = MIN_LEVEL_OF_TIMESTAMP_TRUNC, true) { (date: String, fmt: String) => s"truncTimestamp($date, $fmt, $zid);" } } override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): TruncTimestamp = copy(format = newLeft, timestamp = newRight) } /** * Returns the number of days from startDate to endDate. 
*/ @ExpressionDescription( usage = "_FUNC_(endDate, startDate) - Returns the number of days from `startDate` to `endDate`.", examples = """ Examples: > SELECT _FUNC_('2009-07-31', '2009-07-30'); 1 > SELECT _FUNC_('2009-07-30', '2009-07-31'); -1 """, group = "datetime_funcs", since = "1.5.0") case class DateDiff(endDate: Expression, startDate: Expression) extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant { override def left: Expression = endDate override def right: Expression = startDate override def inputTypes: Seq[AbstractDataType] = Seq(DateType, DateType) override def dataType: DataType = IntegerType override def nullSafeEval(end: Any, start: Any): Any = { end.asInstanceOf[Int] - start.asInstanceOf[Int] } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { defineCodeGen(ctx, ev, (end, start) => s"$end - $start") } override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DateDiff = copy(endDate = newLeft, startDate = newRight) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(year, month, day) - Create date from year, month and day fields. If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.", arguments = """ Arguments: * year - the year to represent, from 1 to 9999 * month - the month-of-year to represent, from 1 (January) to 12 (December) * day - the day-of-month to represent, from 1 to 31 """, examples = """ Examples: > SELECT _FUNC_(2013, 7, 15); 2013-07-15 > SELECT _FUNC_(2019, 7, NULL); NULL """, group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit case class MakeDate( year: Expression, month: Expression, day: Expression, failOnError: Boolean = SQLConf.get.ansiEnabled) extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant { def this(year: Expression, month: Expression, day: Expression) = this(year, month, day, SQLConf.get.ansiEnabled) override def first: Expression = year override def second: Expression = month override def third: Expression = day override def inputTypes: Seq[AbstractDataType] = Seq(IntegerType, IntegerType, IntegerType) override def dataType: DataType = DateType override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true override def nullSafeEval(year: Any, month: Any, day: Any): Any = { try { val ld = LocalDate.of(year.asInstanceOf[Int], month.asInstanceOf[Int], day.asInstanceOf[Int]) localDateToDays(ld) } catch { case e: java.time.DateTimeException => if (failOnError) throw QueryExecutionErrors.ansiDateTimeError(e) else null } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val failOnErrorBranch = if (failOnError) { "throw QueryExecutionErrors.ansiDateTimeError(e);" } else { s"${ev.isNull} = true;" } nullSafeCodeGen(ctx, ev, (year, month, day) => { s""" try { ${ev.value} = $dtu.localDateToDays(java.time.LocalDate.of($year, $month, $day)); } catch (java.time.DateTimeException e) { $failOnErrorBranch }""" }) } override def prettyName: String = "make_date" override protected def withNewChildrenInternal( newFirst: Expression, newSecond: Expression, newThird: Expression): MakeDate = copy(year = newFirst, month = newSecond, day = newThird) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(year, month, day, hour, min, sec) - Create local date-time from year, month, day, hour, min, sec fields. 
If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.", arguments = """ Arguments: * year - the year to represent, from 1 to 9999 * month - the month-of-year to represent, from 1 (January) to 12 (December) * day - the day-of-month to represent, from 1 to 31 * hour - the hour-of-day to represent, from 0 to 23 * min - the minute-of-hour to represent, from 0 to 59 * sec - the second-of-minute and its micro-fraction to represent, from 0 to 60. If the sec argument equals to 60, the seconds field is set to 0 and 1 minute is added to the final timestamp. """, examples = """ Examples: > SELECT _FUNC_(2014, 12, 28, 6, 30, 45.887); 2014-12-28 06:30:45.887 > SELECT _FUNC_(2019, 6, 30, 23, 59, 60); 2019-07-01 00:00:00 > SELECT _FUNC_(null, 7, 22, 15, 30, 0); NULL """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit object MakeTimestampNTZExpressionBuilder extends ExpressionBuilder { override def build(funcName: String, expressions: Seq[Expression]): Expression = { val numArgs = expressions.length if (numArgs == 6) { MakeTimestamp( expressions(0), expressions(1), expressions(2), expressions(3), expressions(4), expressions(5), dataType = TimestampNTZType) } else { throw QueryCompilationErrors.invalidFunctionArgumentNumberError(Seq(6), funcName, numArgs) } } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(year, month, day, hour, min, sec[, timezone]) - Create the current timestamp with local time zone from year, month, day, hour, min, sec and timezone fields. If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.", arguments = """ Arguments: * year - the year to represent, from 1 to 9999 * month - the month-of-year to represent, from 1 (January) to 12 (December) * day - the day-of-month to represent, from 1 to 31 * hour - the hour-of-day to represent, from 0 to 23 * min - the minute-of-hour to represent, from 0 to 59 * sec - the second-of-minute and its micro-fraction to represent, from 0 to 60. If the sec argument equals to 60, the seconds field is set to 0 and 1 minute is added to the final timestamp. * timezone - the time zone identifier. For example, CET, UTC and etc. """, examples = """ Examples: > SELECT _FUNC_(2014, 12, 28, 6, 30, 45.887); 2014-12-28 06:30:45.887 > SELECT _FUNC_(2014, 12, 28, 6, 30, 45.887, 'CET'); 2014-12-27 21:30:45.887 > SELECT _FUNC_(2019, 6, 30, 23, 59, 60); 2019-07-01 00:00:00 > SELECT _FUNC_(null, 7, 22, 15, 30, 0); NULL """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit object MakeTimestampLTZExpressionBuilder extends ExpressionBuilder { override def build(funcName: String, expressions: Seq[Expression]): Expression = { val numArgs = expressions.length if (numArgs == 6 || numArgs == 7) { MakeTimestamp( expressions(0), expressions(1), expressions(2), expressions(3), expressions(4), expressions(5), expressions.drop(6).lastOption, dataType = TimestampType) } else { throw QueryCompilationErrors.invalidFunctionArgumentNumberError(Seq(6), funcName, numArgs) } } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(year, month, day, hour, min, sec[, timezone]) - Create timestamp from year, month, day, hour, min, sec and timezone fields. The result data type is consistent with the value of configuration `spark.sql.timestampType`. 
If the configuration `spark.sql.ansi.enabled` is false, the function returns NULL on invalid inputs. Otherwise, it will throw an error instead.", arguments = """ Arguments: * year - the year to represent, from 1 to 9999 * month - the month-of-year to represent, from 1 (January) to 12 (December) * day - the day-of-month to represent, from 1 to 31 * hour - the hour-of-day to represent, from 0 to 23 * min - the minute-of-hour to represent, from 0 to 59 * sec - the second-of-minute and its micro-fraction to represent, from 0 to 60. The value can be either an integer like 13 , or a fraction like 13.123. If the sec argument equals to 60, the seconds field is set to 0 and 1 minute is added to the final timestamp. * timezone - the time zone identifier. For example, CET, UTC and etc. """, examples = """ Examples: > SELECT _FUNC_(2014, 12, 28, 6, 30, 45.887); 2014-12-28 06:30:45.887 > SELECT _FUNC_(2014, 12, 28, 6, 30, 45.887, 'CET'); 2014-12-27 21:30:45.887 > SELECT _FUNC_(2019, 6, 30, 23, 59, 60); 2019-07-01 00:00:00 > SELECT _FUNC_(2019, 6, 30, 23, 59, 1); 2019-06-30 23:59:01 > SELECT _FUNC_(null, 7, 22, 15, 30, 0); NULL """, group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit case class MakeTimestamp( year: Expression, month: Expression, day: Expression, hour: Expression, min: Expression, sec: Expression, timezone: Option[Expression] = None, timeZoneId: Option[String] = None, failOnError: Boolean = SQLConf.get.ansiEnabled, override val dataType: DataType = SQLConf.get.timestampType) extends SeptenaryExpression with TimeZoneAwareExpression with ImplicitCastInputTypes with NullIntolerant { def this( year: Expression, month: Expression, day: Expression, hour: Expression, min: Expression, sec: Expression) = { this(year, month, day, hour, min, sec, None, None, SQLConf.get.ansiEnabled, SQLConf.get.timestampType) } def this( year: Expression, month: Expression, day: Expression, hour: Expression, min: Expression, sec: Expression, timezone: Expression) = { this(year, month, day, hour, min, sec, Some(timezone), None, SQLConf.get.ansiEnabled, SQLConf.get.timestampType) } override def children: Seq[Expression] = Seq(year, month, day, hour, min, sec) ++ timezone // Accept `sec` as DecimalType to avoid loosing precision of microseconds while converting // them to the fractional part of `sec`. For accepts IntegerType as `sec` and integer can be // casted into decimal safely, we use DecimalType(16, 6) which is wider than DecimalType(10, 0). 
override def inputTypes: Seq[AbstractDataType] = Seq(IntegerType, IntegerType, IntegerType, IntegerType, IntegerType, DecimalType(16, 6)) ++ timezone.map(_ => StringType) override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) private def toMicros( year: Int, month: Int, day: Int, hour: Int, min: Int, secAndMicros: Decimal, zoneId: ZoneId): Any = { try { assert(secAndMicros.scale == 6, s"Seconds fraction must have 6 digits for microseconds but got ${secAndMicros.scale}") val unscaledSecFrac = secAndMicros.toUnscaledLong val totalMicros = unscaledSecFrac.toInt // 8 digits cannot overflow Int val seconds = Math.floorDiv(totalMicros, MICROS_PER_SECOND.toInt) val nanos = Math.floorMod(totalMicros, MICROS_PER_SECOND.toInt) * NANOS_PER_MICROS.toInt val ldt = if (seconds == 60) { if (nanos == 0) { // This case of sec = 60 and nanos = 0 is supported for compatibility with PostgreSQL LocalDateTime.of(year, month, day, hour, min, 0, 0).plusMinutes(1) } else { throw QueryExecutionErrors.invalidFractionOfSecondError() } } else { LocalDateTime.of(year, month, day, hour, min, seconds, nanos) } if (dataType == TimestampType) { instantToMicros(ldt.atZone(zoneId).toInstant) } else { localDateTimeToMicros(ldt) } } catch { case e: SparkDateTimeException if failOnError => throw e case e: DateTimeException if failOnError => throw QueryExecutionErrors.ansiDateTimeError(e) case _: DateTimeException => null } } override def nullSafeEval( year: Any, month: Any, day: Any, hour: Any, min: Any, sec: Any, timezone: Option[Any]): Any = { val zid = timezone .map(tz => DateTimeUtils.getZoneId(tz.asInstanceOf[UTF8String].toString)) .getOrElse(zoneId) toMicros( year.asInstanceOf[Int], month.asInstanceOf[Int], day.asInstanceOf[Int], hour.asInstanceOf[Int], min.asInstanceOf[Int], sec.asInstanceOf[Decimal], zid) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) val d = Decimal.getClass.getName.stripSuffix("$") val failOnErrorBranch = if (failOnError) { "throw QueryExecutionErrors.ansiDateTimeError(e);" } else { s"${ev.isNull} = true;" } val failOnSparkErrorBranch = if (failOnError) "throw e;" else s"${ev.isNull} = true;" nullSafeCodeGen(ctx, ev, (year, month, day, hour, min, secAndNanos, timezone) => { val zoneId = timezone.map(tz => s"$dtu.getZoneId(${tz}.toString())").getOrElse(zid) val toMicrosCode = if (dataType == TimestampType) { s""" |java.time.Instant instant = ldt.atZone($zoneId).toInstant(); |${ev.value} = $dtu.instantToMicros(instant); |""".stripMargin } else { s"${ev.value} = $dtu.localDateTimeToMicros(ldt);" } s""" try { org.apache.spark.sql.types.Decimal secFloor = $secAndNanos.floor(); org.apache.spark.sql.types.Decimal nanosPerSec = $d$$.MODULE$$.apply(1000000000L, 10, 0); int nanos = (($secAndNanos.$$minus(secFloor)).$$times(nanosPerSec)).toInt(); int seconds = secFloor.toInt(); java.time.LocalDateTime ldt; if (seconds == 60) { if (nanos == 0) { ldt = java.time.LocalDateTime.of( $year, $month, $day, $hour, $min, 0, 0).plusMinutes(1); } else { throw QueryExecutionErrors.invalidFractionOfSecondError(); } } else { ldt = java.time.LocalDateTime.of($year, $month, $day, $hour, $min, seconds, nanos); } $toMicrosCode } catch (org.apache.spark.SparkDateTimeException e) { $failOnSparkErrorBranch } catch 
(java.time.DateTimeException e) { $failOnErrorBranch }""" }) } override def nodeName: String = "make_timestamp" // override def children: Seq[Expression] = Seq(year, month, day, hour, min, sec) ++ timezone override protected def withNewChildrenInternal( newChildren: IndexedSeq[Expression]): MakeTimestamp = { val timezoneOpt = if (timezone.isDefined) Some(newChildren(6)) else None copy( year = newChildren(0), month = newChildren(1), day = newChildren(2), hour = newChildren(3), min = newChildren(4), sec = newChildren(5), timezone = timezoneOpt) } } object DatePart { def parseExtractField( extractField: String, source: Expression): Expression = extractField.toUpperCase(Locale.ROOT) match { case "YEAR" | "Y" | "YEARS" | "YR" | "YRS" => Year(source) case "YEAROFWEEK" => YearOfWeek(source) case "QUARTER" | "QTR" => Quarter(source) case "MONTH" | "MON" | "MONS" | "MONTHS" => Month(source) case "WEEK" | "W" | "WEEKS" => WeekOfYear(source) case "DAY" | "D" | "DAYS" => DayOfMonth(source) case "DAYOFWEEK" | "DOW" => DayOfWeek(source) case "DAYOFWEEK_ISO" | "DOW_ISO" => Add(WeekDay(source), Literal(1)) case "DOY" => DayOfYear(source) case "HOUR" | "H" | "HOURS" | "HR" | "HRS" => Hour(source) case "MINUTE" | "M" | "MIN" | "MINS" | "MINUTES" => Minute(source) case "SECOND" | "S" | "SEC" | "SECONDS" | "SECS" => SecondWithFraction(source) case _ => throw QueryCompilationErrors.literalTypeUnsupportedForSourceTypeError(extractField, source) } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(field, source) - Extracts a part of the date/timestamp or interval source.", arguments = """ Arguments: * field - selects which part of the source should be extracted, and supported string values are as same as the fields of the equivalent function `EXTRACT`. * source - a date/timestamp or interval column from where `field` should be extracted """, examples = """ Examples: > SELECT _FUNC_('YEAR', TIMESTAMP '2019-08-12 01:00:00.123456'); 2019 > SELECT _FUNC_('week', timestamp'2019-08-12 01:00:00.123456'); 33 > SELECT _FUNC_('doy', DATE'2019-08-12'); 224 > SELECT _FUNC_('SECONDS', timestamp'2019-10-01 00:00:01.000001'); 1.000001 > SELECT _FUNC_('days', interval 5 days 3 hours 7 minutes); 5 > SELECT _FUNC_('seconds', interval 5 hours 30 seconds 1 milliseconds 1 microseconds); 30.001001 > SELECT _FUNC_('MONTH', INTERVAL '2021-11' YEAR TO MONTH); 11 > SELECT _FUNC_('MINUTE', INTERVAL '123 23:55:59.002001' DAY TO SECOND); 55 """, note = """ The _FUNC_ function is equivalent to the SQL-standard function `EXTRACT(field FROM source)` """, group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit object DatePartExpressionBuilder extends ExpressionBuilder { override def build(funcName: String, expressions: Seq[Expression]): Expression = { val numArgs = expressions.length if (numArgs == 2) { val field = expressions(0) val source = expressions(1) Extract(field, source, Extract.createExpr(funcName, field, source)) } else { throw QueryCompilationErrors.invalidFunctionArgumentNumberError(Seq(2), funcName, numArgs) } } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(field FROM source) - Extracts a part of the date/timestamp or interval source.", arguments = """ Arguments: * field - selects which part of the source should be extracted - Supported string values of `field` for dates and timestamps are(case insensitive): - "YEAR", ("Y", "YEARS", "YR", "YRS") - the year field - "YEAROFWEEK" - the ISO 8601 week-numbering year that the datetime falls in. 
For example, 2005-01-02 is part of the 53rd week of year 2004, so the result is 2004 - "QUARTER", ("QTR") - the quarter (1 - 4) of the year that the datetime falls in - "MONTH", ("MON", "MONS", "MONTHS") - the month field (1 - 12) - "WEEK", ("W", "WEEKS") - the number of the ISO 8601 week-of-week-based-year. A week is considered to start on a Monday and week 1 is the first week with >3 days. In the ISO week-numbering system, it is possible for early-January dates to be part of the 52nd or 53rd week of the previous year, and for late-December dates to be part of the first week of the next year. For example, 2005-01-02 is part of the 53rd week of year 2004, while 2012-12-31 is part of the first week of 2013 - "DAY", ("D", "DAYS") - the day of the month field (1 - 31) - "DAYOFWEEK",("DOW") - the day of the week for datetime as Sunday(1) to Saturday(7) - "DAYOFWEEK_ISO",("DOW_ISO") - ISO 8601 based day of the week for datetime as Monday(1) to Sunday(7) - "DOY" - the day of the year (1 - 365/366) - "HOUR", ("H", "HOURS", "HR", "HRS") - The hour field (0 - 23) - "MINUTE", ("M", "MIN", "MINS", "MINUTES") - the minutes field (0 - 59) - "SECOND", ("S", "SEC", "SECONDS", "SECS") - the seconds field, including fractional parts - Supported string values of `field` for interval(which consists of `months`, `days`, `microseconds`) are(case insensitive): - "YEAR", ("Y", "YEARS", "YR", "YRS") - the total `months` / 12 - "MONTH", ("MON", "MONS", "MONTHS") - the total `months` % 12 - "DAY", ("D", "DAYS") - the `days` part of interval - "HOUR", ("H", "HOURS", "HR", "HRS") - how many hours the `microseconds` contains - "MINUTE", ("M", "MIN", "MINS", "MINUTES") - how many minutes left after taking hours from `microseconds` - "SECOND", ("S", "SEC", "SECONDS", "SECS") - how many second with fractions left after taking hours and minutes from `microseconds` * source - a date/timestamp or interval column from where `field` should be extracted """, examples = """ Examples: > SELECT _FUNC_(YEAR FROM TIMESTAMP '2019-08-12 01:00:00.123456'); 2019 > SELECT _FUNC_(week FROM timestamp'2019-08-12 01:00:00.123456'); 33 > SELECT _FUNC_(doy FROM DATE'2019-08-12'); 224 > SELECT _FUNC_(SECONDS FROM timestamp'2019-10-01 00:00:01.000001'); 1.000001 > SELECT _FUNC_(days FROM interval 5 days 3 hours 7 minutes); 5 > SELECT _FUNC_(seconds FROM interval 5 hours 30 seconds 1 milliseconds 1 microseconds); 30.001001 > SELECT _FUNC_(MONTH FROM INTERVAL '2021-11' YEAR TO MONTH); 11 > SELECT _FUNC_(MINUTE FROM INTERVAL '123 23:55:59.002001' DAY TO SECOND); 55 """, note = """ The _FUNC_ function is equivalent to `date_part(field, source)`. """, group = "datetime_funcs", since = "3.0.0") // scalastyle:on line.size.limit case class Extract(field: Expression, source: Expression, replacement: Expression) extends RuntimeReplaceable with InheritAnalysisRules { def this(field: Expression, source: Expression) = this(field, source, Extract.createExpr("extract", field, source)) override def parameters: Seq[Expression] = Seq(field, source) override def makeSQLString(childrenSQL: Seq[String]): String = { getTagValue(FunctionRegistry.FUNC_ALIAS) match { case Some("date_part") => s"$prettyName(${childrenSQL.mkString(", ")})" case _ => s"$prettyName(${childrenSQL.mkString(" FROM ")})" } } override protected def withNewChildInternal(newChild: Expression): Expression = { copy(replacement = newChild) } } object Extract { def createExpr(funcName: String, field: Expression, source: Expression): Expression = { // both string and null literals are allowed. 
if ((field.dataType == StringType || field.dataType == NullType) && field.foldable) { val fieldStr = field.eval().asInstanceOf[UTF8String] if (fieldStr == null) { Literal(null, DoubleType) } else { source.dataType match { case _: AnsiIntervalType | CalendarIntervalType => ExtractIntervalPart.parseExtractField(fieldStr.toString, source) case _ => DatePart.parseExtractField(fieldStr.toString, source) } } } else { throw QueryCompilationErrors.requireLiteralParameter(funcName, "field", "string") } } } /** * Returns the interval from `right` to `left` timestamps. * - When the SQL config `spark.sql.legacy.interval.enabled` is `true`, * it returns `CalendarIntervalType` in which the months` and `day` field is set to 0 and * the `microseconds` field is initialized to the microsecond difference between * the given timestamps. * - Otherwise the expression returns `DayTimeIntervalType` with the difference in microseconds * between given timestamps. */ case class SubtractTimestamps( left: Expression, right: Expression, legacyInterval: Boolean, timeZoneId: Option[String] = None) extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes with NullIntolerant { def this(endTimestamp: Expression, startTimestamp: Expression) = this(endTimestamp, startTimestamp, SQLConf.get.legacyIntervalEnabled) override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType, AnyTimestampType) override def dataType: DataType = if (legacyInterval) CalendarIntervalType else DayTimeIntervalType() override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(left.dataType) @transient private lazy val evalFunc: (Long, Long) => Any = if (legacyInterval) { (leftMicros, rightMicros) => new CalendarInterval(0, 0, leftMicros - rightMicros) } else { (leftMicros, rightMicros) => subtractTimestamps(leftMicros, rightMicros, zoneIdInEval) } override def nullSafeEval(leftMicros: Any, rightMicros: Any): Any = { evalFunc(leftMicros.asInstanceOf[Long], rightMicros.asInstanceOf[Long]) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = if (legacyInterval) { defineCodeGen(ctx, ev, (end, start) => s"new org.apache.spark.unsafe.types.CalendarInterval(0, 0, $end - $start)") } else { val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (l, r) => s"""$dtu.subtractTimestamps($l, $r, $zid)""") } override def toString: String = s"($left - $right)" override def sql: String = s"(${left.sql} - ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): SubtractTimestamps = copy(left = newLeft, right = newRight) } object SubtractTimestamps { def apply(left: Expression, right: Expression): SubtractTimestamps = { new SubtractTimestamps(left, right) } } /** * Returns the interval from the `left` date (inclusive) to the `right` date (exclusive). * - When the SQL config `spark.sql.legacy.interval.enabled` is `true`, * it returns `CalendarIntervalType` in which the `microseconds` field is set to 0 and * the `months` and `days` fields are initialized to the difference between the given dates. * - Otherwise the expression returns `DayTimeIntervalType` with the difference in days * between the given dates. 
*/ case class SubtractDates( left: Expression, right: Expression, legacyInterval: Boolean) extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant { def this(left: Expression, right: Expression) = this(left, right, SQLConf.get.legacyIntervalEnabled) override def inputTypes: Seq[AbstractDataType] = Seq(DateType, DateType) override def dataType: DataType = { if (legacyInterval) CalendarIntervalType else DayTimeIntervalType(DAY) } @transient private lazy val evalFunc: (Int, Int) => Any = if (legacyInterval) { (leftDays: Int, rightDays: Int) => subtractDates(leftDays, rightDays) } else { (leftDays: Int, rightDays: Int) => Math.multiplyExact(Math.subtractExact(leftDays, rightDays), MICROS_PER_DAY) } override def nullSafeEval(leftDays: Any, rightDays: Any): Any = { evalFunc(leftDays.asInstanceOf[Int], rightDays.asInstanceOf[Int]) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = if (legacyInterval) { defineCodeGen(ctx, ev, (leftDays, rightDays) => { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") s"$dtu.subtractDates($leftDays, $rightDays)" }) } else { val m = classOf[Math].getName defineCodeGen(ctx, ev, (leftDays, rightDays) => s"$m.multiplyExact($m.subtractExact($leftDays, $rightDays), ${MICROS_PER_DAY}L)") } override def toString: String = s"($left - $right)" override def sql: String = s"(${left.sql} - ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): SubtractDates = copy(left = newLeft, right = newRight) } object SubtractDates { def apply(left: Expression, right: Expression): SubtractDates = new SubtractDates(left, right) } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(sourceTz, targetTz, sourceTs) - Converts the timestamp without time zone `sourceTs` from the `sourceTz` time zone to `targetTz`. 
", arguments = """ Arguments: * sourceTz - the time zone for the input timestamp * targetTz - the time zone to which the input timestamp should be converted * sourceTs - a timestamp without time zone """, examples = """ Examples: > SELECT _FUNC_('Europe/Amsterdam', 'America/Los_Angeles', timestamp_ntz'2021-12-06 00:00:00'); 2021-12-05 15:00:00 """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit case class ConvertTimezone( sourceTz: Expression, targetTz: Expression, sourceTs: Expression) extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant { override def first: Expression = sourceTz override def second: Expression = targetTz override def third: Expression = sourceTs override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, TimestampNTZType) override def dataType: DataType = TimestampNTZType override def nullSafeEval(srcTz: Any, tgtTz: Any, micros: Any): Any = { DateTimeUtils.convertTimestampNtzToAnotherTz( srcTz.asInstanceOf[UTF8String].toString, tgtTz.asInstanceOf[UTF8String].toString, micros.asInstanceOf[Long]) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (srcTz, tgtTz, micros) => s"""$dtu.convertTimestampNtzToAnotherTz($srcTz.toString(), $tgtTz.toString(), $micros)""") } override def prettyName: String = "convert_timezone" override protected def withNewChildrenInternal( newFirst: Expression, newSecond: Expression, newThird: Expression): ConvertTimezone = { copy(sourceTz = newFirst, targetTz = newSecond, sourceTs = newThird) } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(unit, quantity, timestamp) - Adds the specified number of units to the given timestamp.", arguments = """ Arguments: * unit - this indicates the units of datetime that you want to add. Supported string values of `unit` are (case insensitive): - "YEAR" - "QUARTER" - 3 months - "MONTH" - "WEEK" - 7 days - "DAY", "DAYOFYEAR" - "HOUR" - "MINUTE" - "SECOND" - "MILLISECOND" - "MICROSECOND" * quantity - this is the number of units of time that you want to add. * timestamp - this is a timestamp (w/ or w/o timezone) to which you want to add. 
""", examples = """ Examples: > SELECT _FUNC_('HOUR', 8, timestamp_ntz'2022-02-11 20:30:00'); 2022-02-12 04:30:00 > SELECT _FUNC_('MONTH', 1, timestamp_ltz'2022-01-31 00:00:00'); 2022-02-28 00:00:00 > SELECT _FUNC_(SECOND, -10, date'2022-01-01'); 2021-12-31 23:59:50 > SELECT _FUNC_(YEAR, 10, timestamp'2000-01-01 01:02:03.123456'); 2010-01-01 01:02:03.123456 """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit case class TimestampAdd( unit: Expression, quantity: Expression, timestamp: Expression, timeZoneId: Option[String] = None) extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant with TimeZoneAwareExpression { def this(unit: Expression, quantity: Expression, timestamp: Expression) = this(unit, quantity, timestamp, None) override def first: Expression = unit override def second: Expression = quantity override def third: Expression = timestamp override def inputTypes: Seq[AbstractDataType] = Seq(StringType, IntegerType, AnyTimestampType) override def dataType: DataType = timestamp.dataType override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(timestamp.dataType) override def nullSafeEval(u: Any, q: Any, micros: Any): Any = { DateTimeUtils.timestampAdd( u.asInstanceOf[UTF8String].toString, q.asInstanceOf[Int], micros.asInstanceOf[Long], zoneIdInEval) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) defineCodeGen(ctx, ev, (u, q, micros) => s"""$dtu.timestampAdd($u.toString(), $q, $micros, $zid)""") } override def prettyName: String = "timestampadd" override protected def withNewChildrenInternal( newFirst: Expression, newSecond: Expression, newThird: Expression): TimestampAdd = { copy(unit = newFirst, quantity = newSecond, timestamp = newThird) } } // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(unit, startTimestamp, endTimestamp) - Gets the difference between the timestamps `endTimestamp` and `startTimestamp` in the specified units by truncating the fraction part.", arguments = """ Arguments: * unit - this indicates the units of the difference between the given timestamps. Supported string values of `unit` are (case insensitive): - "YEAR" - "QUARTER" - 3 months - "MONTH" - "WEEK" - 7 days - "DAY" - "HOUR" - "MINUTE" - "SECOND" - "MILLISECOND" - "MICROSECOND" * startTimestamp - A timestamp which the expression subtracts from `endTimestamp`. * endTimestamp - A timestamp from which the expression subtracts `startTimestamp`. 
""", examples = """ Examples: > SELECT _FUNC_('HOUR', timestamp_ntz'2022-02-11 20:30:00', timestamp_ntz'2022-02-12 04:30:00'); 8 > SELECT _FUNC_('MONTH', timestamp_ltz'2022-01-01 00:00:00', timestamp_ltz'2022-02-28 00:00:00'); 1 > SELECT _FUNC_(SECOND, date'2022-01-01', timestamp'2021-12-31 23:59:50'); -10 > SELECT _FUNC_(YEAR, timestamp'2000-01-01 01:02:03.123456', timestamp'2010-01-01 01:02:03.123456'); 10 """, group = "datetime_funcs", since = "3.3.0") // scalastyle:on line.size.limit case class TimestampDiff( unit: Expression, startTimestamp: Expression, endTimestamp: Expression, timeZoneId: Option[String] = None) extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant with TimeZoneAwareExpression { def this(unit: Expression, quantity: Expression, timestamp: Expression) = this(unit, quantity, timestamp, None) override def first: Expression = unit override def second: Expression = startTimestamp override def third: Expression = endTimestamp override def inputTypes: Seq[AbstractDataType] = Seq(StringType, TimestampType, TimestampType) override def dataType: DataType = LongType override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(endTimestamp.dataType) override def nullSafeEval(u: Any, startMicros: Any, endMicros: Any): Any = { DateTimeUtils.timestampDiff( u.asInstanceOf[UTF8String].toString, startMicros.asInstanceOf[Long], endMicros.asInstanceOf[Long], zoneIdInEval) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName) defineCodeGen(ctx, ev, (u, s, e) => s"""$dtu.timestampDiff($u.toString(), $s, $e, $zid)""") } override def prettyName: String = "timestampdiff" override protected def withNewChildrenInternal( newFirst: Expression, newSecond: Expression, newThird: Expression): TimestampDiff = { copy(unit = newFirst, startTimestamp = newSecond, endTimestamp = newThird) } }
gengliangwang/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
Scala
apache-2.0
120,726
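The datetimeExpressions.scala record above defines, among others, the SQL functions months_between, trunc, date_trunc, make_date, make_timestamp, timestampadd, timestampdiff and convert_timezone. The sketch below is not part of that record; it simply exercises a few of these functions through spark.sql, reusing the values from their ExpressionDescription examples, and assumes a Spark 3.3+ session since timestampadd, timestampdiff and convert_timezone only exist from 3.3.0.

// Illustrative sketch only, not Spark source: drives the documented SQL datetime functions end to end.
import org.apache.spark.sql.SparkSession

object DatetimeExpressionsDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("DatetimeExpressionsDemo").getOrCreate()

    // months_between, trunc and date_trunc, using the values from their documented examples
    spark.sql("SELECT months_between('1997-02-28 10:30:00', '1996-10-30')").show(false)
    spark.sql("SELECT trunc('2019-08-04', 'week'), date_trunc('HOUR', '2015-03-05T09:32:05.359')").show(false)

    // make_date / make_timestamp build datetime values from individual fields
    spark.sql("SELECT make_date(2013, 7, 15), make_timestamp(2014, 12, 28, 6, 30, 45.887)").show(false)

    // timestampadd, timestampdiff and convert_timezone (Spark 3.3.0+)
    spark.sql("SELECT timestampadd('HOUR', 8, timestamp'2022-02-11 20:30:00')").show(false)
    spark.sql("SELECT timestampdiff('MONTH', timestamp'2022-01-01 00:00:00', timestamp'2022-02-28 00:00:00')").show(false)
    spark.sql("SELECT convert_timezone('Europe/Amsterdam', 'America/Los_Angeles', timestamp_ntz'2021-12-06 00:00:00')").show(false)

    spark.stop()
  }
}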
package com.sfxcode.sapphire.core.value import java.util import javafx.collections._ object FXBeanCollections { def observableMap[K, V <: AnyRef]: ObservableMap[K, FXBean[V]] = FXCollections.observableHashMap[K, FXBean[V]]() def observableList[T <: AnyRef]: ObservableList[FXBean[T]] = FXCollections.observableArrayList[FXBean[T]]() def observableHashSet[T <: AnyRef]: ObservableSet[FXBean[T]] = FXCollections.observableSet(new util.HashSet[FXBean[T]]()) }
sfxcode/sapphire-core
src/main/scala/com/sfxcode/sapphire/core/value/FXBeanCollections.scala
Scala
apache-2.0
482
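A minimal usage sketch for the FXBeanCollections factories above. It is not part of the sapphire-core sources: Person is a made-up bean class, and the snippet assumes the JavaFX and sapphire-core (FXBean) dependencies are on the classpath.

import com.sfxcode.sapphire.core.value.{FXBean, FXBeanCollections}
import javafx.collections.{ObservableList, ObservableMap, ObservableSet}

// Hypothetical bean type used only for this illustration
class Person(var name: String, var age: Int)

object FXBeanCollectionsDemo extends App {
  // Typed observable containers whose elements are FXBean-wrapped Person instances
  val personList: ObservableList[FXBean[Person]] = FXBeanCollections.observableList[Person]
  val personMap: ObservableMap[String, FXBean[Person]] = FXBeanCollections.observableMap[String, Person]
  val personSet: ObservableSet[FXBean[Person]] = FXBeanCollections.observableHashSet[Person]

  println(s"list=${personList.size()}, map=${personMap.size()}, set=${personSet.size()}")
}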
package templemore.sbt /** * @author Chris Turner */ case class Gem(name: String, version: String, source: String) { def command = "%s --version %s --source %s".format(name, version, source) }
skipoleschris/cucumber-sbt-plugin
src/main/scala/templemore/sbt/Gem.scala
Scala
apache-2.0
198
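A quick illustration of the command string produced by the Gem case class above; the gem coordinates are made up.

import templemore.sbt.Gem

object GemDemo extends App {
  val cucumber = Gem("cucumber", "1.2.1", "http://rubygems.org")
  // Prints: cucumber --version 1.2.1 --source http://rubygems.org
  println(cucumber.command)
}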
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.graphx.lib import scala.util.Random import com.github.fommil.netlib.BLAS.{getInstance => blas} import org.apache.spark.graphx._ import org.apache.spark.rdd._ /** Implementation of SVD++ algorithm. */ object SVDPlusPlus { /** Configuration parameters for SVDPlusPlus. */ class Conf( var rank: Int, var maxIters: Int, var minVal: Double, var maxVal: Double, var gamma1: Double, var gamma2: Double, var gamma6: Double, var gamma7: Double) extends Serializable /** * Implement SVD++ based on "Factorization Meets the Neighborhood: * a Multifaceted Collaborative Filtering Model", * available at <a href="http://public.research.att.com/~volinsky/netflix/kdd08koren.pdf"> * here</a>. * * The prediction rule is rui = u + bu + bi + qi*(pu + |N(u)|^^-0.5^^*sum(y)), * see the details on page 6. * * @param edges edges for constructing the graph * * @param conf SVDPlusPlus parameters * * @return a graph with vertex attributes containing the trained model */ def run(edges: RDD[Edge[Double]], conf: Conf) : (Graph[(Array[Double], Array[Double], Double, Double), Double], Double) = { require(conf.maxIters > 0, s"Maximum of iterations must be greater than 0," + s" but got ${conf.maxIters}") require(conf.maxVal > conf.minVal, s"MaxVal must be greater than MinVal," + s" but got {maxVal: ${conf.maxVal}, minVal: ${conf.minVal}}") // Generate default vertex attribute def defaultF(rank: Int): (Array[Double], Array[Double], Double, Double) = { // TODO: use a fixed random seed val v1 = Array.fill(rank)(Random.nextDouble()) val v2 = Array.fill(rank)(Random.nextDouble()) (v1, v2, 0.0, 0.0) } // calculate global rating mean edges.cache() val (rs, rc) = edges.map(e => (e.attr, 1L)).fold((0, 0))((a, b) => (a._1 + b._1, a._2 + b._2)) val u = rs / rc // construct graph var g = Graph.fromEdges(edges, defaultF(conf.rank)).cache() materialize(g) edges.unpersist() // Calculate initial bias and norm val t0 = g.aggregateMessages[(Long, Double)]( ctx => { ctx.sendToSrc((1L, ctx.attr)); ctx.sendToDst((1L, ctx.attr)) }, (g1, g2) => (g1._1 + g2._1, g1._2 + g2._2)) val gJoinT0 = g.outerJoinVertices(t0) { (vid: VertexId, vd: (Array[Double], Array[Double], Double, Double), msg: Option[(Long, Double)]) => (vd._1, vd._2, msg.get._2 / msg.get._1 - u, 1.0 / scala.math.sqrt(msg.get._1)) }.cache() materialize(gJoinT0) g.unpersist() g = gJoinT0 def sendMsgTrainF(conf: Conf, u: Double) (ctx: EdgeContext[ (Array[Double], Array[Double], Double, Double), Double, (Array[Double], Array[Double], Double)]) { val (usr, itm) = (ctx.srcAttr, ctx.dstAttr) val (p, q) = (usr._1, itm._1) val rank = p.length var pred = u + usr._3 + itm._3 + blas.ddot(rank, q, 1, usr._2, 1) pred = math.max(pred, conf.minVal) pred = math.min(pred, conf.maxVal) val err = ctx.attr - pred // 
updateP = (err * q - conf.gamma7 * p) * conf.gamma2 val updateP = q.clone() blas.dscal(rank, err * conf.gamma2, updateP, 1) blas.daxpy(rank, -conf.gamma7 * conf.gamma2, p, 1, updateP, 1) // updateQ = (err * usr._2 - conf.gamma7 * q) * conf.gamma2 val updateQ = usr._2.clone() blas.dscal(rank, err * conf.gamma2, updateQ, 1) blas.daxpy(rank, -conf.gamma7 * conf.gamma2, q, 1, updateQ, 1) // updateY = (err * usr._4 * q - conf.gamma7 * itm._2) * conf.gamma2 val updateY = q.clone() blas.dscal(rank, err * usr._4 * conf.gamma2, updateY, 1) blas.daxpy(rank, -conf.gamma7 * conf.gamma2, itm._2, 1, updateY, 1) ctx.sendToSrc((updateP, updateY, (err - conf.gamma6 * usr._3) * conf.gamma1)) ctx.sendToDst((updateQ, updateY, (err - conf.gamma6 * itm._3) * conf.gamma1)) } for (i <- 0 until conf.maxIters) { // Phase 1, calculate pu + |N(u)|^(-0.5)*sum(y) for user nodes g.cache() val t1 = g.aggregateMessages[Array[Double]]( ctx => ctx.sendToSrc(ctx.dstAttr._2), (g1, g2) => { val out = g1.clone() blas.daxpy(out.length, 1.0, g2, 1, out, 1) out }) val gJoinT1 = g.outerJoinVertices(t1) { (vid: VertexId, vd: (Array[Double], Array[Double], Double, Double), msg: Option[Array[Double]]) => if (msg.isDefined) { val out = vd._1.clone() blas.daxpy(out.length, vd._4, msg.get, 1, out, 1) (vd._1, out, vd._3, vd._4) } else { vd } }.cache() materialize(gJoinT1) g.unpersist() g = gJoinT1 // Phase 2, update p for user nodes and q, y for item nodes g.cache() val t2 = g.aggregateMessages( sendMsgTrainF(conf, u), (g1: (Array[Double], Array[Double], Double), g2: (Array[Double], Array[Double], Double)) => { val out1 = g1._1.clone() blas.daxpy(out1.length, 1.0, g2._1, 1, out1, 1) val out2 = g2._2.clone() blas.daxpy(out2.length, 1.0, g2._2, 1, out2, 1) (out1, out2, g1._3 + g2._3) }) val gJoinT2 = g.outerJoinVertices(t2) { (vid: VertexId, vd: (Array[Double], Array[Double], Double, Double), msg: Option[(Array[Double], Array[Double], Double)]) => { val out1 = vd._1.clone() blas.daxpy(out1.length, 1.0, msg.get._1, 1, out1, 1) val out2 = vd._2.clone() blas.daxpy(out2.length, 1.0, msg.get._2, 1, out2, 1) (out1, out2, vd._3 + msg.get._3, vd._4) } }.cache() materialize(gJoinT2) g.unpersist() g = gJoinT2 } // calculate error on training set def sendMsgTestF(conf: Conf, u: Double) (ctx: EdgeContext[(Array[Double], Array[Double], Double, Double), Double, Double]) { val (usr, itm) = (ctx.srcAttr, ctx.dstAttr) val (p, q) = (usr._1, itm._1) var pred = u + usr._3 + itm._3 + blas.ddot(q.length, q, 1, usr._2, 1) pred = math.max(pred, conf.minVal) pred = math.min(pred, conf.maxVal) val err = (ctx.attr - pred) * (ctx.attr - pred) ctx.sendToDst(err) } g.cache() val t3 = g.aggregateMessages[Double](sendMsgTestF(conf, u), _ + _) val gJoinT3 = g.outerJoinVertices(t3) { (vid: VertexId, vd: (Array[Double], Array[Double], Double, Double), msg: Option[Double]) => if (msg.isDefined) (vd._1, vd._2, vd._3, msg.get) else vd }.cache() materialize(gJoinT3) g.unpersist() g = gJoinT3 // Convert DoubleMatrix to Array[Double]: val newVertices = g.vertices.mapValues(v => (v._1.toArray, v._2.toArray, v._3, v._4)) (Graph(newVertices, g.edges), u) } /** * Forces materialization of a Graph by count()ing its RDDs. */ private def materialize(g: Graph[_, _]): Unit = { g.vertices.count() g.edges.count() } }
pgandhi999/spark
graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
Scala
apache-2.0
7,850
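A sketch of how SVDPlusPlus.run might be invoked; the ratings and the hyperparameter values are arbitrary illustrative choices, not recommendations taken from the file above.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.Edge
import org.apache.spark.graphx.lib.SVDPlusPlus

object SVDPlusPlusDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("SVDPlusPlusDemo").setMaster("local[*]"))

    // srcId = user, dstId = item, attr = rating
    val edges = sc.parallelize(Seq(
      Edge(1L, 101L, 4.0), Edge(1L, 102L, 3.0),
      Edge(2L, 101L, 5.0), Edge(2L, 103L, 2.0)))

    val conf = new SVDPlusPlus.Conf(
      rank = 2, maxIters = 5, minVal = 0.0, maxVal = 5.0,
      gamma1 = 0.007, gamma2 = 0.007, gamma6 = 0.005, gamma7 = 0.015)

    val (model, meanRating) = SVDPlusPlus.run(edges, conf)
    println(s"global mean rating: $meanRating, trained vertices: ${model.vertices.count()}")

    sc.stop()
  }
}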
/** * Copied from http://codereview.stackexchange.com/questions/79267/scala-trywith-that-closes-resources-automatically * with adaption for throwing exceptions as required for running tests */ package uk.ac.cdrc.mintsearch.neo4j import scala.util.control.NonFatal import scala.util.{Failure, Success, Try} object WithResource { def apply[C <: AutoCloseable, R](resource: => C)(f: C => R): R = Try(resource).flatMap(resourceInstance => { try { val returnValue = f(resourceInstance) Try(resourceInstance.close()).map(_ => returnValue) } catch { case NonFatal(exceptionInFunction) => try { resourceInstance.close() Failure(exceptionInFunction) } catch { case NonFatal(exceptionInClose) => exceptionInFunction.addSuppressed(exceptionInClose) Failure(exceptionInFunction) } } }) match { case Failure(ex) => throw ex case Success(x) => x } }
spacelis/mint-search
neo4j-plugin/src/main/scala/uk/ac/cdrc/mintsearch/neo4j/WithResource.scala
Scala
apache-2.0
1,006
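A usage sketch for WithResource above; the file path is just a placeholder for any readable text file.

import java.io.{BufferedReader, FileReader}
import uk.ac.cdrc.mintsearch.neo4j.WithResource

object WithResourceDemo extends App {
  // The reader is closed whether or not readLine() throws; any failure is rethrown after closing.
  val firstLine = WithResource(new BufferedReader(new FileReader("/tmp/example.txt"))) { reader =>
    reader.readLine()
  }
  println(firstLine)
}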
package org.jetbrains.plugins.scala.codeInspection.targetNameAnnotation import com.intellij.codeInspection._ import com.intellij.psi.PsiElement import org.jetbrains.plugins.scala.codeInspection.ScalaInspectionBundle class MultipleTargetNameAnnotationsInspection extends TargetNameInspectionBase { override protected val findProblemElement: PartialFunction[PsiElement, ProblemElement] = { case TargetNameAnnotationWithOwner(annotation, owner) if owner.annotations(TargetNameAnnotationFQN).sizeIs > 1 => val quickFix = new RemoveAnnotationQuickFix(annotation, owner) ProblemElement(annotation, quickFix, MultipleTargetNameAnnotationsInspection.message) } } object MultipleTargetNameAnnotationsInspection { private[targetNameAnnotation] val message = ScalaInspectionBundle.message("all.but.last.targetname.annotation.ignored") }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/targetNameAnnotation/MultipleTargetNameAnnotationsInspection.scala
Scala
apache-2.0
855
package com.github.diegopacheco.sandbox.scripts.scala.actors

object ActorFun extends App {

  import scala.actors._
  import scala.actors.Actor._

  val deniro = actor {
    loop {
      receive {
        case s: String => println("Got String: " + s)
        case i: Int    => println("Got Int: " + i)
        case _         => println("Have no idea what is going on...")
      }
    }
  }

  deniro ! "Roberto"
  deniro ! 40
  deniro !! false
}
diegopacheco/scala-playground
scala-playground/src/com/github/diegopacheco/sandbox/scripts/scala/actors/ActorFun.scala
Scala
unlicense
410
package varys.util

import scala.collection.mutable.{ArrayBuffer, SynchronizedMap, HashMap}
import scala.util.Random

private[varys] class SlaveToBpsMap {
  val writeBlockRanGen = new Random()
  val idToBpsMap = new HashMap[String, BpsInfo] with SynchronizedMap[String, BpsInfo]

  def updateNetworkStats(id: String, newBps: Double) = {
    val bpsInfo = idToBpsMap.getOrElse(id, new BpsInfo())
    bpsInfo.update(newBps)
    idToBpsMap.put(id, bpsInfo)
  }

  def adjustBps(id: String, blockSize: Long) = {
    val bpsInfo = idToBpsMap.getOrElse(id, new BpsInfo())
    bpsInfo.moveToTemp(blockSize)
    idToBpsMap.put(id, bpsInfo)
  }

  def getBps(id: String): Double = {
    val bpsInfo = idToBpsMap.getOrElse(id, new BpsInfo())
    bpsInfo.getBps
  }

  def getRandomN(numMachines: Int, adjustBytes: Long): ArrayBuffer[String] = this.synchronized {
    val retVal = new ArrayBuffer[String]
    val machines = idToBpsMap.keys.toList
    assert(numMachines <= machines.size)

    val wasSelected = Array.ofDim[Boolean](machines.size)
    var machinesToPick = numMachines
    while (machinesToPick > 0) {
      machinesToPick -= 1
      var toAdd = -1
      while (toAdd == -1) {
        toAdd = writeBlockRanGen.nextInt(machines.size)
        if (wasSelected(toAdd)) {
          toAdd = -1
        }
      }
      retVal += machines(toAdd)
      adjustBps(machines(toAdd), adjustBytes)
      wasSelected(toAdd) = true
    }
    retVal
  }

  def getTopN(numMachines: Int, adjustBytes: Long): ArrayBuffer[String] = this.synchronized {
    val retVal = new ArrayBuffer[String]
    val machines = idToBpsMap.keys.toList
    assert(numMachines <= machines.size)

    def compByBps(o1: String, o2: String) = getBps(o1) < getBps(o2)

    // sortWith returns a new list; keep it so the ordering is actually applied
    val sortedMachines = machines.sortWith(compByBps)
    for (i <- 0 until numMachines) {
      retVal += sortedMachines(i)
      adjustBps(sortedMachines(i), adjustBytes)
    }
    retVal
  }
}
frankfzw/varys
core/src/main/scala/varys/util/SlaveToBpsMap.scala
Scala
apache-2.0
1,996
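A sketch of how SlaveToBpsMap above might be driven; the slave ids and bandwidth figures are invented, and the demo sits in the varys.util package because the class is package-private.

package varys.util

object SlaveToBpsMapDemo extends App {
  val bpsMap = new SlaveToBpsMap

  // Record observed bandwidth (bytes per second) per slave
  bpsMap.updateNetworkStats("slave-1", 1e6)
  bpsMap.updateNetworkStats("slave-2", 5e6)
  bpsMap.updateNetworkStats("slave-3", 2e6)

  // Pick two machines and account for a pending 64 MB write on each
  val chosen = bpsMap.getTopN(2, 64L * 1024 * 1024)
  println(chosen.mkString(", "))
  println(bpsMap.getBps("slave-1"))
}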
package pep_075

import common.GCD.gcd

object Solution {

  def isPerimeterOfOnlyOneRightAngleTriangle(p: Int): Boolean = {
    var (t, k, overflow) = (0, 0, false)
    for (m <- 2 to math.floor(math.sqrt(p / 2)).toInt; if !overflow) {
      if ((p / 2) % m == 0) {
        if (m % 2 == 0) {
          k = m + 1
        } else {
          k = m + 2
        }
        while (!overflow && (k < 2 * m && k <= p / (2 * m))) {
          if (p / (2 * m) % k == 0 && gcd(k, m) == 1) {
            t += 1
            overflow = t == 2
          }
          k += 2
        }
      }
    }
    t == 1
  }

  def solve(max: Int = 1500000) =
    (1 to max).par.count(isPerimeterOfOnlyOneRightAngleTriangle) / 2
}
filippovitale/pe
pe-solution/src/main/scala/pep_075/Solution.scala
Scala
mit
699
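Presumably this solves Project Euler problem 75 (wire lengths that form exactly one integer right-angle triangle); a trivial runner for the Solution object above:

import pep_075.Solution

object PE75Runner extends App {
  // A smaller limit first for a quick sanity run, then the default 1,500,000 limit
  println(Solution.solve(1000))
  println(Solution.solve())
}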
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.scheduler import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong} import com.codahale.metrics.{Gauge, Timer} import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.util.Utils /** * An asynchronous queue for events. All events posted to this queue will be delivered to the child * listeners in a separate thread. * * Delivery will only begin when the `start()` method is called. The `stop()` method should be * called when no more events need to be delivered. */ private class AsyncEventQueue( val name: String, conf: SparkConf, metrics: LiveListenerBusMetrics, bus: LiveListenerBus) extends SparkListenerBus with Logging { import AsyncEventQueue._ // Cap the capacity of the queue so we get an explicit error (rather than an OOM exception) if // it's perpetually being added to more quickly than it's being drained. private val eventQueue = new LinkedBlockingQueue[SparkListenerEvent]( conf.get(LISTENER_BUS_EVENT_QUEUE_CAPACITY)) // Keep the event count separately, so that waitUntilEmpty() can be implemented properly; // this allows that method to return only when the events in the queue have been fully // processed (instead of just dequeued). private val eventCount = new AtomicLong() /** A counter for dropped events. It will be reset every time we log it. */ private val droppedEventsCounter = new AtomicLong(0L) /** When `droppedEventsCounter` was logged last time in milliseconds. */ @volatile private var lastReportTimestamp = 0L private val logDroppedEvent = new AtomicBoolean(false) private var sc: SparkContext = null private val started = new AtomicBoolean(false) private val stopped = new AtomicBoolean(false) private val droppedEvents = metrics.metricRegistry.counter(s"queue.$name.numDroppedEvents") private val processingTime = metrics.metricRegistry.timer(s"queue.$name.listenerProcessingTime") // Remove the queue size gauge first, in case it was created by a previous incarnation of // this queue that was removed from the listener bus. 
metrics.metricRegistry.remove(s"queue.$name.size") metrics.metricRegistry.register(s"queue.$name.size", new Gauge[Int] { override def getValue: Int = eventQueue.size() }) private val dispatchThread = new Thread(s"spark-listener-group-$name") { setDaemon(true) override def run(): Unit = Utils.tryOrStopSparkContext(sc) { dispatch() } } private def dispatch(): Unit = LiveListenerBus.withinListenerThread.withValue(true) { var next: SparkListenerEvent = eventQueue.take() while (next != POISON_PILL) { val ctx = processingTime.time() try { super.postToAll(next) } finally { ctx.stop() } eventCount.decrementAndGet() next = eventQueue.take() } eventCount.decrementAndGet() } override protected def getTimer(listener: SparkListenerInterface): Option[Timer] = { metrics.getTimerForListenerClass(listener.getClass.asSubclass(classOf[SparkListenerInterface])) } /** * Start an asynchronous thread to dispatch events to the underlying listeners. * * @param sc Used to stop the SparkContext in case the async dispatcher fails. */ private[scheduler] def start(sc: SparkContext): Unit = { if (started.compareAndSet(false, true)) { this.sc = sc dispatchThread.start() } else { throw new IllegalStateException(s"$name already started!") } } /** * Stop the listener bus. It will wait until the queued events have been processed, but new * events will be dropped. */ private[scheduler] def stop(): Unit = { if (!started.get()) { throw new IllegalStateException(s"Attempted to stop $name that has not yet started!") } if (stopped.compareAndSet(false, true)) { eventCount.incrementAndGet() eventQueue.put(POISON_PILL) } // this thread might be trying to stop itself as part of error handling -- we can't join // in that case. if (Thread.currentThread() != dispatchThread) { dispatchThread.join() } } def post(event: SparkListenerEvent): Unit = { if (stopped.get()) { return } eventCount.incrementAndGet() if (eventQueue.offer(event)) { return } eventCount.decrementAndGet() droppedEvents.inc() droppedEventsCounter.incrementAndGet() if (logDroppedEvent.compareAndSet(false, true)) { // Only log the following message once to avoid duplicated annoying logs. logError(s"Dropping event from queue $name. " + "This likely means one of the listeners is too slow and cannot keep up with " + "the rate at which tasks are being started by the scheduler.") } logTrace(s"Dropping event $event") val droppedCount = droppedEventsCounter.get if (droppedCount > 0) { // Don't log too frequently if (System.currentTimeMillis() - lastReportTimestamp >= 60 * 1000) { // There may be multiple threads trying to decrease droppedEventsCounter. // Use "compareAndSet" to make sure only one thread can win. // And if another thread is increasing droppedEventsCounter, "compareAndSet" will fail and // then that thread will update it. if (droppedEventsCounter.compareAndSet(droppedCount, 0)) { val prevLastReportTimestamp = lastReportTimestamp lastReportTimestamp = System.currentTimeMillis() val previous = new java.util.Date(prevLastReportTimestamp) logWarning(s"Dropped $droppedCount events from $name since $previous.") } } } } /** * For testing only. Wait until there are no more events in the queue. * * @return true if the queue is empty. 
*/ def waitUntilEmpty(deadline: Long): Boolean = { while (eventCount.get() != 0) { if (System.currentTimeMillis > deadline) { return false } Thread.sleep(10) } true } override def removeListenerOnError(listener: SparkListenerInterface): Unit = { // the listener failed in an unrecoverable way; we want to remove it from the entire // LiveListenerBus (potentially stopping a queue if it is empty) bus.removeListener(listener) } } private object AsyncEventQueue { val POISON_PILL = new SparkListenerEvent() { } }
bravo-zhang/spark
core/src/main/scala/org/apache/spark/scheduler/AsyncEventQueue.scala
Scala
apache-2.0
7,271
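The core pattern in AsyncEventQueue above (a bounded LinkedBlockingQueue drained by a single daemon thread, shut down by posting a sentinel poison pill) can be illustrated with a minimal self-contained sketch; the names below, such as SimpleAsyncQueue, are illustrative and are not Spark APIs.

import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.atomic.AtomicBoolean

// Minimal poison-pill dispatcher sketch (illustrative; not Spark's AsyncEventQueue).
class SimpleAsyncQueue[T <: AnyRef](capacity: Int, handler: T => Unit) {
  private val queue = new LinkedBlockingQueue[AnyRef](capacity)
  private val stopped = new AtomicBoolean(false)
  private val PoisonPill = new AnyRef

  private val dispatcher = new Thread("simple-async-queue-dispatcher") {
    setDaemon(true)
    override def run(): Unit = {
      var next = queue.take()
      while (next ne PoisonPill) {
        handler(next.asInstanceOf[T]) // deliver on the dispatcher thread
        next = queue.take()
      }
    }
  }

  def start(): Unit = dispatcher.start()

  // Non-blocking post; returns false when the event is dropped (queue full or stopped).
  def post(event: T): Boolean = !stopped.get() && queue.offer(event)

  def stop(): Unit = {
    if (stopped.compareAndSet(false, true)) {
      queue.put(PoisonPill) // wakes the dispatcher after everything already queued is drained
      if (Thread.currentThread() ne dispatcher) dispatcher.join()
    }
  }
}

Posting a sentinel instead of interrupting the thread lets every event queued before stop() still be delivered, which is the same design choice the Spark code makes.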
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.utils import java.util.Properties import joptsimple.{OptionParser, OptionSpec} import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api.Test class CommandLineUtilsTest { @Test def testParseEmptyArg(): Unit = { val argArray = Array("my.empty.property=") assertThrows(classOf[java.lang.IllegalArgumentException], () => CommandLineUtils.parseKeyValueArgs(argArray, acceptMissingValue = false)) } @Test def testParseEmptyArgWithNoDelimiter(): Unit = { val argArray = Array("my.empty.property") assertThrows(classOf[java.lang.IllegalArgumentException], () => CommandLineUtils.parseKeyValueArgs(argArray, acceptMissingValue = false)) } @Test def testParseEmptyArgAsValid(): Unit = { val argArray = Array("my.empty.property=", "my.empty.property1") val props = CommandLineUtils.parseKeyValueArgs(argArray) assertEquals(props.getProperty("my.empty.property"), "", "Value of a key with missing value should be an empty string") assertEquals(props.getProperty("my.empty.property1"), "", "Value of a key with missing value with no delimiter should be an empty string") } @Test def testParseSingleArg(): Unit = { val argArray = Array("my.property=value") val props = CommandLineUtils.parseKeyValueArgs(argArray) assertEquals(props.getProperty("my.property"), "value", "Value of a single property should be 'value' ") } @Test def testParseArgs(): Unit = { val argArray = Array("first.property=first","second.property=second") val props = CommandLineUtils.parseKeyValueArgs(argArray) assertEquals(props.getProperty("first.property"), "first", "Value of first property should be 'first'") assertEquals(props.getProperty("second.property"), "second", "Value of second property should be 'second'") } @Test def testParseArgsWithMultipleDelimiters(): Unit = { val argArray = Array("first.property==first", "second.property=second=", "third.property=thi=rd") val props = CommandLineUtils.parseKeyValueArgs(argArray) assertEquals(props.getProperty("first.property"), "=first", "Value of first property should be '=first'") assertEquals(props.getProperty("second.property"), "second=", "Value of second property should be 'second='") assertEquals(props.getProperty("third.property"), "thi=rd", "Value of second property should be 'thi=rd'") } val props = new Properties() val parser = new OptionParser(false) var stringOpt : OptionSpec[String] = _ var intOpt : OptionSpec[java.lang.Integer] = _ var stringOptOptionalArg : OptionSpec[String] = _ var intOptOptionalArg : OptionSpec[java.lang.Integer] = _ var stringOptOptionalArgNoDefault : OptionSpec[String] = _ var intOptOptionalArgNoDefault : OptionSpec[java.lang.Integer] = _ def setUpOptions(): Unit = { stringOpt = parser.accepts("str") .withRequiredArg .ofType(classOf[String]) .defaultsTo("default-string") 
intOpt = parser.accepts("int") .withRequiredArg() .ofType(classOf[java.lang.Integer]) .defaultsTo(100) stringOptOptionalArg = parser.accepts("str-opt") .withOptionalArg .ofType(classOf[String]) .defaultsTo("default-string-2") intOptOptionalArg = parser.accepts("int-opt") .withOptionalArg .ofType(classOf[java.lang.Integer]) .defaultsTo(200) stringOptOptionalArgNoDefault = parser.accepts("str-opt-nodef") .withOptionalArg .ofType(classOf[String]) intOptOptionalArgNoDefault = parser.accepts("int-opt-nodef") .withOptionalArg .ofType(classOf[java.lang.Integer]) } @Test def testMaybeMergeOptionsOverwriteExisting(): Unit = { setUpOptions() props.put("skey", "existing-string") props.put("ikey", "300") props.put("sokey", "existing-string-2") props.put("iokey", "400") props.put("sondkey", "existing-string-3") props.put("iondkey", "500") val options = parser.parse( "--str", "some-string", "--int", "600", "--str-opt", "some-string-2", "--int-opt", "700", "--str-opt-nodef", "some-string-3", "--int-opt-nodef", "800" ) CommandLineUtils.maybeMergeOptions(props, "skey", options, stringOpt) CommandLineUtils.maybeMergeOptions(props, "ikey", options, intOpt) CommandLineUtils.maybeMergeOptions(props, "sokey", options, stringOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "iokey", options, intOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "sondkey", options, stringOptOptionalArgNoDefault) CommandLineUtils.maybeMergeOptions(props, "iondkey", options, intOptOptionalArgNoDefault) assertEquals("some-string", props.get("skey")) assertEquals("600", props.get("ikey")) assertEquals("some-string-2", props.get("sokey")) assertEquals("700", props.get("iokey")) assertEquals("some-string-3", props.get("sondkey")) assertEquals("800", props.get("iondkey")) } @Test def testMaybeMergeOptionsDefaultOverwriteExisting(): Unit = { setUpOptions() props.put("sokey", "existing-string") props.put("iokey", "300") props.put("sondkey", "existing-string-2") props.put("iondkey", "400") val options = parser.parse( "--str-opt", "--int-opt", "--str-opt-nodef", "--int-opt-nodef" ) CommandLineUtils.maybeMergeOptions(props, "sokey", options, stringOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "iokey", options, intOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "sondkey", options, stringOptOptionalArgNoDefault) CommandLineUtils.maybeMergeOptions(props, "iondkey", options, intOptOptionalArgNoDefault) assertEquals("default-string-2", props.get("sokey")) assertEquals("200", props.get("iokey")) assertNull(props.get("sondkey")) assertNull(props.get("iondkey")) } @Test def testMaybeMergeOptionsDefaultValueIfNotExist(): Unit = { setUpOptions() val options = parser.parse() CommandLineUtils.maybeMergeOptions(props, "skey", options, stringOpt) CommandLineUtils.maybeMergeOptions(props, "ikey", options, intOpt) CommandLineUtils.maybeMergeOptions(props, "sokey", options, stringOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "iokey", options, intOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "sondkey", options, stringOptOptionalArgNoDefault) CommandLineUtils.maybeMergeOptions(props, "iondkey", options, intOptOptionalArgNoDefault) assertEquals("default-string", props.get("skey")) assertEquals("100", props.get("ikey")) assertEquals("default-string-2", props.get("sokey")) assertEquals("200", props.get("iokey")) assertNull(props.get("sondkey")) assertNull(props.get("iondkey")) } @Test def testMaybeMergeOptionsNotOverwriteExisting(): Unit = { setUpOptions() props.put("skey", "existing-string") 
props.put("ikey", "300") props.put("sokey", "existing-string-2") props.put("iokey", "400") props.put("sondkey", "existing-string-3") props.put("iondkey", "500") val options = parser.parse() CommandLineUtils.maybeMergeOptions(props, "skey", options, stringOpt) CommandLineUtils.maybeMergeOptions(props, "ikey", options, intOpt) CommandLineUtils.maybeMergeOptions(props, "sokey", options, stringOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "iokey", options, intOptOptionalArg) CommandLineUtils.maybeMergeOptions(props, "sondkey", options, stringOptOptionalArgNoDefault) CommandLineUtils.maybeMergeOptions(props, "iondkey", options, intOptOptionalArgNoDefault) assertEquals("existing-string", props.get("skey")) assertEquals("300", props.get("ikey")) assertEquals("existing-string-2", props.get("sokey")) assertEquals("400", props.get("iokey")) assertEquals("existing-string-3", props.get("sondkey")) assertEquals("500", props.get("iondkey")) } }
guozhangwang/kafka
core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala
Scala
apache-2.0
8,732
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.mllib.tree.impurity import org.apache.spark.annotation.{DeveloperApi, Experimental, Since} /** * :: Experimental :: * Class for calculating [[http://en.wikipedia.org/wiki/Binary_entropy_function entropy]] during * binary classification. */ @Since("1.0.0") @Experimental object Entropy extends Impurity { private[tree] def log2(x: Double) = scala.math.log(x) / scala.math.log(2) /** * :: DeveloperApi :: * information calculation for multiclass classification * @param counts Array[Double] with counts for each label * @param totalCount sum of counts for all labels * @return information value, or 0 if totalCount = 0 */ @Since("1.1.0") @DeveloperApi override def calculate(counts: Array[Double], totalCount: Double): Double = { if (totalCount == 0) { return 0 } val numClasses = counts.length var impurity = 0.0 var classIndex = 0 while (classIndex < numClasses) { val classCount = counts(classIndex) if (classCount != 0) { val freq = classCount / totalCount impurity -= freq * log2(freq) } classIndex += 1 } impurity } /** * :: DeveloperApi :: * variance calculation * @param count number of instances * @param sum sum of labels * @param sumSquares summation of squares of the labels * @return information value, or 0 if count = 0 */ @Since("1.0.0") @DeveloperApi override def calculate(count: Double, sum: Double, sumSquares: Double): Double = throw new UnsupportedOperationException("Entropy.calculate") /** * Get this impurity instance. * This is useful for passing impurity parameters to a Strategy in Java. */ @Since("1.1.0") def instance: this.type = this } /** * Class for updating views of a vector of sufficient statistics, * in order to compute impurity from a sample. * Note: Instances of this class do not hold the data; they operate on views of the data. * @param numClasses Number of classes for label. */ private[tree] class EntropyAggregator(numClasses: Int) extends ImpurityAggregator(numClasses) with Serializable { /** * Update stats for one (node, feature, bin) with the given label. * @param allStats Flat stats array, with stats for this (node, feature, bin) contiguous. * @param offset Start index of stats for this (node, feature, bin). */ def update(allStats: Array[Double], offset: Int, label: Double, instanceWeight: Double): Unit = { if (label >= statsSize) { throw new IllegalArgumentException(s"EntropyAggregator given label $label" + s" but requires label < numClasses (= $statsSize).") } if (label < 0) { throw new IllegalArgumentException(s"EntropyAggregator given label $label" + s"but requires label is non-negative.") } allStats(offset + label.toInt) += instanceWeight } /** * Get an [[ImpurityCalculator]] for a (node, feature, bin). 
* @param allStats Flat stats array, with stats for this (node, feature, bin) contiguous. * @param offset Start index of stats for this (node, feature, bin). */ def getCalculator(allStats: Array[Double], offset: Int): EntropyCalculator = { new EntropyCalculator(allStats.view(offset, offset + statsSize).toArray) } } /** * Stores statistics for one (node, feature, bin) for calculating impurity. * Unlike [[EntropyAggregator]], this class stores its own data and is for a specific * (node, feature, bin). * @param stats Array of sufficient statistics for a (node, feature, bin). */ private[spark] class EntropyCalculator(stats: Array[Double]) extends ImpurityCalculator(stats) { /** * Make a deep copy of this [[ImpurityCalculator]]. */ def copy: EntropyCalculator = new EntropyCalculator(stats.clone()) /** * Calculate the impurity from the stored sufficient statistics. */ def calculate(): Double = Entropy.calculate(stats, stats.sum) /** * Number of data points accounted for in the sufficient statistics. */ def count: Long = stats.sum.toLong /** * Prediction which should be made based on the sufficient statistics. */ def predict: Double = if (count == 0) { 0 } else { indexOfLargestArrayElement(stats) } /** * Probability of the label given by [[predict]]. */ override def prob(label: Double): Double = { val lbl = label.toInt require(lbl < stats.length, s"EntropyCalculator.prob given invalid label: $lbl (should be < ${stats.length}") require(lbl >= 0, "Entropy does not support negative labels") val cnt = count if (cnt == 0) { 0 } else { stats(lbl) / cnt } } override def toString: String = s"EntropyCalculator(stats = [${stats.mkString(", ")}])" }
practice-vishnoi/dev-spark-1
mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala
Scala
apache-2.0
5,559
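The multiclass information calculation in Entropy.calculate above is the standard entropy -sum(freq * log2(freq)) over class frequencies. Below is a standalone sketch of the same computation, free of MLlib's aggregator plumbing; EntropySketch is a made-up name for illustration.

// Standalone entropy over label counts: -sum(freq * log2(freq)), defined as 0 for an empty node.
object EntropySketch {
  private def log2(x: Double): Double = math.log(x) / math.log(2)

  def entropy(counts: Array[Double]): Double = {
    val total = counts.sum
    if (total == 0) 0.0
    else counts.filter(_ > 0).map { c =>
      val freq = c / total
      -freq * log2(freq)
    }.sum
  }

  def main(args: Array[String]): Unit = {
    println(entropy(Array(5.0, 5.0)))  // two equally likely classes: 1.0 bit
    println(entropy(Array(10.0, 0.0))) // a pure node: 0.0
  }
}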
/** * Scala Units * Copyright (C) 2012 Bradley W. Kimmel * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ package ca.eandb.units /** * Indicates an attempt to convert to Units of differing dimensions. * @param from The Units that were being converted. * @param to The Units to which conversion was requested. */ class IncompatibleUnitsException(from: Units, to: Units) extends IllegalArgumentException("Incompatible units: %s -> %s".format(from, to)) /** * Indicates a parsing error occurred while attempting to parse units. * @param units The string being parsed. */ class UnitsParsingException(units: String) extends IllegalArgumentException("Unable to parse units: %s".format(units)) /** * Indicates a parsing error occurred while attempting to parse a command. * @param cmd The string being parsed. */ class CommandParsingException(cmd: String) extends IllegalArgumentException("Unable to parse command: %s".format(cmd)) /** * Indicates the presence of an undefined unit while attempting to resolve * units. * @param symbol The symbol for the undefined unit. */ class UndefinedUnitsException(symbol: String) extends IllegalArgumentException("Cannot resolve symbol: %s".format(symbol)) /** Indicates that the units could not be resolved. */ class UnitsResolutionException(msg: String) extends IllegalArgumentException(msg)
bwkimmel/scala-units
src/main/scala/ca/eandb/units/Exceptions.scala
Scala
mit
2,395
/* * (c) Copyright 2016 Hewlett Packard Enterprise Development LP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cogx.compiler.codegenerator.opencl.generator import cogx.platform.types._ import cogx.platform.types.ElementTypes.Complex32 import cogx.compiler.codegenerator.opencl.hyperkernels._ import cogx.compiler.parser.op._ import cogx.compiler.codegenerator.common.FieldPolicies._ import cogx.api.ImplicitConversions import cogx.compiler.parser.op.ConvolveOp import cogx.cogmath.geometry.Shape import cogx.cogmath.algebra.real.Logarithm import cogx.cogmath.algebra.complex.{ComplexVector, Complex, ComplexMatrix} import cogx.cogmath.fft.FFT2D import cogx.compiler.codegenerator.opencl.cpukernels.ConstantFieldKernel /** Code generation for convolution with a kernel that is dynamically altered at * runtime. There are many cases to cover here, based on: * * 1. What type of field, e.g. Scalar, Vector, Color or Complex, the inputs are. * 2. Whether the filter field (or image field for that matter) is truly * dynamic, or is sourced from a static field (with no recurrence). * 3. Whether the input is large, and may be more efficiently handled by * an FFT-based approach. * 4. The dimensionality of the convolution, 1D, 2D or 3D. * * This was a clone of the DynamicConvolutionGenerator designed to process * just vector and matrix convolutions via the FFT. This is done with a * "private" double-deep real vector or matrix field to hold the intermediate * complex fields. This class will likely soon go away once HyperFields are * introduced along with multi-output kernels. * * @author Dick Carter */ private [cogx] object FFTVectorConvolutionGenerator extends ImplicitConversions with Logarithm { /** Handle FFT-based convolution where either image or filter (or both) are a * VectorField or MatrixField. The inputs and outputs are necessarily * real (since Cog doesn't support complex vector fields natively). However, * a VectorField with 2X the number of tensor elements is used privately by * this generator and the kernels it invokes: the FFT2DVectorHyperKernel and * the FFTMultiplyHyperKernel (see further comments there for the data layout). */ def apply(inputs: Array[VirtualFieldRegister], op: ConvolveOp, resultType: FieldType): AbstractKernel = { val image = inputs(0) val kernel = inputs(1) val imageType = image.fieldType val kernelType = kernel.fieldType val kernelRows = kernelType.rows val kernelColumns = kernelType.columns require(imageType.tensorShape == kernelType.tensorShape || isTensor0Field(imageType) || isTensor0Field(kernelType), "incompatible field types for vector convolution: " + imageType + ", " + kernelType) // The tensor shape of the result (after the inverse FFT) */ val resultTensorShape = if (isTensor0Field(imageType)) kernelType.tensorShape else imageType.tensorShape // Compute size of FFT image. Note that we use 2 * apronSize // to accomodate border fill. For zero fill, 1 * apronSize is // sufficient. 
For cyclic fill, if the image were a power of 2, technically // no border would be required; however, if any expansion is performed, it // had better be at least 2 * apronSize. For now, since we have a further // complication with the alignment padding, safest is to always add 2 * apronSize. val rowApronSize = kernelRows / 2 val columnApronSize = kernelColumns / 2 val expandedShape = FFTConvolutionUtils.fftShape2D(image.fieldType, kernel.fieldType, op) val fftRows = expandedShape(0) val fftColumns = expandedShape(1) val scaling = 1f / (fftRows * fftColumns) def complexMatrixToKernel(kernelMatrix: ComplexMatrix): AbstractKernel = { val bigKernel = kernelMatrix.expand(fftRows, fftColumns, borderFill = false). shiftCyclic(-rowApronSize, -columnApronSize) val freqKernel = FFT2D.transform(bigKernel) * scaling val resultType = new FieldType(Shape(freqKernel.rows, freqKernel.columns), Shape(), Complex32) val opcode = ConstantComplex2DOp((r,c) => freqKernel(r,c)) new ConstantFieldKernel(resultType, opcode) } /** Converts an array of constant complex filter matrices into the form needed * for a vector FFT. */ def complexMatricesToConstantVectorFieldKernel(kernelMatrices: Array[ComplexMatrix]): AbstractKernel = { val N = kernelMatrices.length val bigKernels = kernelMatrices.map( _.expand(fftRows, fftColumns, borderFill = false). shiftCyclic(-rowApronSize, -columnApronSize)) val freqKernels = bigKernels.map(FFT2D.transform(_) * scaling) val freqKernelShape = Shape(freqKernels(0).rows, freqKernels(0).columns) val resultType = new FieldType(freqKernelShape, Shape(N), Complex32) val opcode = ConstantComplexVector2DOp((r,c) => ComplexVector(N, (i) => freqKernels(i)(r,c)) ) new ConstantFieldKernel(resultType, opcode) } // Prep a dynamic filter: flip, expand, shift, fft, scale def dynamicFilterPrep(scaleFactor: Float) = { val flippedAsNeeded = op.filterOrientation match { case ConvolutionOrientation => kernel case CrossCorrelationOrientation => FlipHyperKernel(Array(kernel), FlipOp, kernel.fieldType).outputs(0) } val expandedType = kernel.fieldType.resize(expandedShape) val expanded = ExpandBorderHyperKernel(flippedAsNeeded, ExpandBorderOp(expandedShape, BorderZero), expandedType) val shiftOp = ShiftOp(Array(-rowApronSize, -columnApronSize), BorderCyclic) val bigKernel = ShiftHyperKernel(Array(expanded.outputs(0)), shiftOp, expandedType) val freqKernelType = toComplex(bigKernel.fieldType) val freqKernel = FFT2DHyperKernel(bigKernel.outputs.toArray, FFT2DOp(scaleFactor), Array(freqKernelType)) freqKernel } // The scale factor can be applied to either the image or the filter. // Since the filter is constant sometimes, we fold the scaling into the filter always. 
// I experimented with applying the scaling to the image for the // ScalarField image convolve VectorField filter case, but saw no discernable // speed-up over always applying the scaling to the filter, so I left it // Still some work to do to make purely functional and avoid instantiating // the entire kernel matrix val kernelSource = kernel.source match { case k: ConstantFieldKernel => k.opcode match { case ConstantScalar2DOp(f) => // Perform kernel flip if needed for cross-correlation val kernelMatrix = op.filterOrientation match { case ConvolutionOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => Complex(f(r,c), 0f)) case CrossCorrelationOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => Complex(f(kernelRows - 1 - r, kernelColumns - 1 - c), 0f)) } complexMatrixToKernel(kernelMatrix) case ConstantComplex2DOp(f) => // Perform kernel flip if needed for cross-correlation val kernelMatrix = op.filterOrientation match { case ConvolutionOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => f(r,c)) case CrossCorrelationOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => f(kernelRows - 1 - r, kernelColumns - 1 - c)) } complexMatrixToKernel(kernelMatrix) case ConstantVector2DOp(f) => val vectorLength = f(0,0).length // Perform kernel flip if needed for cross-correlation val kernelMatrices = Array.tabulate(vectorLength) { i => op.filterOrientation match { case ConvolutionOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => Complex(f(r,c)(i), 0f)) case CrossCorrelationOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => Complex(f(kernelRows - 1 - r, kernelColumns - 1 - c)(i), 0f)) } } complexMatricesToConstantVectorFieldKernel(kernelMatrices) case ConstantComplexVector2DOp(f) => val vectorLength = f(0,0).length // Perform kernel flip if needed for cross-correlation val kernelMatrices = Array.tabulate(vectorLength) { i => op.filterOrientation match { case ConvolutionOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => f(r,c)(i)) case CrossCorrelationOrientation => ComplexMatrix(kernelRows, kernelColumns, (r,c) => f(kernelRows - 1 - r, kernelColumns - 1 - c)(i)) } } complexMatricesToConstantVectorFieldKernel(kernelMatrices) case ConstantMatrix2DOp(f) => throw new RuntimeException("Convolution of images or filters " + "organized as MatrixFields not supported- use VectorFields.") case x => throw new RuntimeException("Internal compiler error: " + "unexpected constant opcode: " + x) } case x => dynamicFilterPrep(scaling) } // Convert the image to complex if necessary and expand the borders. val expandedImageType = imageType.resize(expandedShape) val expandImage = ExpandBorderHyperKernel(image, ExpandBorderOp(expandedShape, op.borderPolicy), expandedImageType) val freqImageType = toComplex(expandedImageType) val freqImage = FFT2DHyperKernel(expandImage.outputs.toArray, FFT2DOp(), Array(freqImageType)) // Do frequency domain convolution val freqConvolvedType = freqImage.fieldType.resizeTensor(resultTensorShape) val freqConvolved = ComplexBinaryHyperKernel(Array(freqImage.outputs(0), kernelSource.outputs(0)), ComplexMultiplyOp, freqConvolvedType) // Inverse FFT and trimming. 
val spaceDomainType = if (isComplexField(imageType) || isComplexField(kernelType)) freqConvolvedType else toReal(freqConvolvedType) val spaceDomain = FFT2DHyperKernel(freqConvolved.outputs.toArray, InverseFFT2DOp(), Array(spaceDomainType)) val trimmed = TrimHyperKernel(Array(spaceDomain.outputs(0)), TrimOp(image.fieldType.fieldShape), spaceDomainType.resize(image.fieldType.fieldShape)) trimmed } }
hpe-cct/cct-core
src/main/scala/cogx/compiler/codegenerator/opencl/generator/FFTVectorConvolutionGenerator.scala
Scala
apache-2.0
11,349
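The sizing comments in FFTVectorConvolutionGenerator above boil down to: pad each dimension by twice the filter apron before the forward FFT, then fold a 1/(rows * columns) factor into the frequency-domain filter to undo the unnormalized transform pair. The small sketch below shows that arithmetic only; the power-of-two rounding is an assumption for illustration, since the actual expanded shape comes from FFTConvolutionUtils.fftShape2D.

// Illustrative padding and scaling arithmetic for FFT-based convolution.
// Power-of-two rounding is assumed here; the real generator delegates to
// FFTConvolutionUtils.fftShape2D for the expanded shape.
object FftSizingSketch {
  private def nextPowerOfTwo(n: Int): Int = {
    var p = 1
    while (p < n) p <<= 1
    p
  }

  def paddedShape(imageRows: Int, imageCols: Int, kernelRows: Int, kernelCols: Int): (Int, Int) = {
    val rowApron = kernelRows / 2
    val colApron = kernelCols / 2
    // 2 * apron accommodates border fill; zero fill would only need 1 * apron.
    (nextPowerOfTwo(imageRows + 2 * rowApron), nextPowerOfTwo(imageCols + 2 * colApron))
  }

  def main(args: Array[String]): Unit = {
    val (fftRows, fftCols) = paddedShape(100, 100, 9, 9)
    val scaling = 1f / (fftRows * fftCols) // folded into the frequency-domain filter once
    println(s"fft shape = ${fftRows}x${fftCols}, scaling = $scaling")
  }
}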
/* * Copyright 2012 Eike Kettner * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.eknet.publet.webdav import javax.servlet.http.HttpServletRequest import org.eknet.publet.web._ import util.{PubletWeb, Key} import scala.Some /** * @author Eike Kettner [email protected] * @since 27.09.12 22:54 */ trait WebdavRequestUtil { this: RequestAttr with RequestUrl => protected def req: HttpServletRequest private val isWebdavRequestKey = Key(getClass.getName, { case org.eknet.publet.web.util.Request => { //for windows clients: they probe the server with an OPTIONS request to the root //thus, we should let this go to milton. isDavRequest(applicationUri) || (applicationPath.isRoot && req.getMethod == Method.options.toString) } }) /** * Returns whether the current request is handled by the webdav filter * * @return */ def isDavRequest: Boolean = attr(isWebdavRequestKey).get /** * Returns whether the request is pointing to a resource that * is mounted as webdav resource. * * @param path the request uri path * @return */ private def isDavRequest(path: String): Boolean = { if (!Config("webdav.enabled").map(_.toBoolean).getOrElse(true)) { false } else { getWebdavFilterUrls.exists(url => path.startsWith(url)) } } /** * Returns all configured url prefixes that are handled by the * webdav filter. * * @return */ private def getWebdavFilterUrls = { def recurseFind(num: Int): List[String] = { val key = "webdav.filter."+num PubletWeb.publetSettings(key) match { case Some(filter) => { filter :: recurseFind(num +1) } case None => Nil } } recurseFind(0) } }
eikek/publet
webdav/src/main/scala/org/eknet/publet/webdav/WebdavRequestUtil.scala
Scala
apache-2.0
2,293
package controllers import _root_.util.JsonHelpers import dbservice.DAO.userDAO import models.{UserHelpers, User} import models.UserHelpers.json._ import models.UserHelpers.json.implicits._ import play.api._ import play.api.libs.json._ import play.api.mvc.Results._ import pwguard.global.Globals.ExecutionContexts.Default._ import _root_.util.EitherOptionHelpers.Implicits._ import _root_.util.EitherOptionHelpers._ import _root_.util.JsonHelpers.angularJson import scala.concurrent.Future import scala.util.control.NonFatal /** Controller for reading and saving users. */ object UserController extends BaseController { override val logger = Logger("pwguard.controllers.UserController") // ------------------------------------------------------------------------- // Public methods // ------------------------------------------------------------------------- def save(id: Int) = SecuredJSONAction { authReq => implicit val request = authReq.request val res = for { userOpt <- userDAO.findByID(id) user <- userOpt.toFuture("User not found.") user2 <- decodeUserJSON(Some(user), request.body) saved <- userDAO.save(user2) json <- safeUserJSON(saved) } yield json res map { json => angularJson(Ok, json) } recover { case NonFatal(e) => angularJson(InternalServerError, jsonError("Save of user ID $id failed", e)) } } def create = SecuredJSONAction { authReq => implicit val request = authReq.request val res = for { user <- decodeUserJSON(None, request.body) saved <- userDAO.create(user) json <- safeUserJSON(saved) } yield json res map { json => angularJson(Ok, json) } recover { case NonFatal(e) => angularJson(InternalServerError, jsonError("Failed to create user", e)) } } def getAll = AdminAction { authReq => val res = for { users <- userDAO.getAll json <- Future.sequence(users.map { safeUserJSON _ }) } yield json res map { json => angularJson(Ok, Json.obj("users" -> json)) } recover { case NonFatal(e) => angularJson(InternalServerError, jsonError("Retrieval failed", e)) } } def getAllWithTotalPasswords = AdminAction { authReq => def createJSON(tuples: Seq[(User, Int)]): Future[Seq[JsValue]] = { val users = tuples.map(_._1) val countsByUser = tuples.toMap Future.sequence { users.map { u => safeUserJSON(u) map { js => (u, js) } } } map { jsonTuples => jsonTuples.map { case (u, js) => val total = countsByUser.getOrElse(u, 0) JsonHelpers.addFields(js, ("totalPasswords" -> Json.toJson(total))) } } } val res = for { tuples <- userDAO.getAllWithPasswordCounts usersJS <- createJSON(tuples) } yield usersJS res map { json => angularJson(Ok, Json.obj("users" -> json)) } recover { case NonFatal(e) => { angularJson(InternalServerError, jsonError("Retrieval failed", e)) } } } def delete(id: Int) = SecuredAction { authReq => userDAO.delete(id) map { ok => angularJson(Ok, Json.obj("ok" -> ok)) } recover { case NonFatal(e) => { angularJson(InternalServerError, jsonError(s"Failed to delete user with ID $id", e)) } } } // ------------------------------------------------------------------------- // Private methods // ------------------------------------------------------------------------- private def decodeUserJSON(userOpt: Option[User], json: JsValue): Future[User] = { val emailOpt = (json \\ "email").asOpt[String] val firstNameOpt = (json \\ "firstName").asOpt[String] val lastNameOpt = (json \\ "lastName").asOpt[String] val password1Opt = blankToNone((json \\ "password1").asOpt[String]) val password2Opt = blankToNone((json \\ "password2").asOpt[String]) val adminOpt = (json \\ "admin").asOpt[Boolean] val activeOpt = (json \\ 
"active").asOpt[Boolean] val pwMatch = Seq(password1Opt, password2Opt).flatten match { case pw1 :: pw2 :: Nil => pw1 == pw2 case Nil => true case _ => false } if (! pwMatch) { Future.failed(new Exception("Passwords don't match.")) } else { def handleExistingUser(u: User): Future[User] = { // Can't overwrite email address on an existing user. val u2 = u.copy(firstName = firstNameOpt.orElse(u.firstName), lastName = lastNameOpt.orElse(u.lastName), active = activeOpt.getOrElse(u.active), admin = adminOpt.getOrElse(u.admin)) password1Opt.map { pw => UserHelpers.encryptLoginPassword(pw) map { epw: String => u2.copy(encryptedPassword = epw) } } .getOrElse(Future.successful(u2)) } def handleNewUser: Future[User] = { // New user. Email and password are required. for { e <- emailOpt.toFuture("Missing email field") pw <- password1Opt.toFuture("Missing password1 field") u <- UserHelpers.createUser( email = e, password = pw, firstName = firstNameOpt, lastName = lastNameOpt, admin = adminOpt.getOrElse(false)) } yield u } userOpt.map { u => handleExistingUser(u) } .getOrElse { handleNewUser } } } }
bmc/pwguard
app/controllers/UserController.scala
Scala
bsd-3-clause
5,695
package uk.co.sprily.dh package util import java.util.concurrent.atomic.AtomicInteger import scala.concurrent.duration._ import org.specs2.ScalaCheck import org.specs2.mutable.Specification import org.specs2.time.NoTimeConversions class ResourcePoolSpec extends Specification with ScalaCheck { "Resource Pool" should { "provide mutual exclusion" in { val singleResource = new Resource() val p = ResourcePool.apply( create = singleResource, isValid = const(true), destroy = unit, maxResources = 1, timeout = 5.seconds) withPool(p) { pool => val ws = for (i <- 0 until 3) yield new Worker(pool, iters=1000) ws.foreach(_.start()) ws.foreach(_.join()) singleResource.numAcqs must === (0) } } "create no more than the max number of resources" in { val numCreated = new AtomicInteger() def create() = { numCreated.incrementAndGet() new Resource() } val p = ResourcePool.apply( create = create, isValid = const(true), destroy = unit, maxResources = 4, timeout = 5.seconds) withPool(p) { pool => val ws = for (i <- 0 until 10) yield new Worker(pool, iters=1000) ws.foreach(_.start()) ws.foreach(_.join()) numCreated.get() must be_<=(4) } } "not lease out a destroyed Resource" in { val numCreated = new AtomicInteger() def create() = { numCreated.incrementAndGet() new Resource() } val p = ResourcePool.apply( create = create, isValid = (r: Resource) => !r.beenAcquired, destroy = unit, maxResources = 4, timeout = 5.seconds) withPool(p) { pool => val ws = for (i <- 0 until 3) yield new Worker(pool, iters=2) ws.foreach(_.start()) ws.foreach(_.join()) } numCreated.get() must === (2 * 3) } } private def withPool[S,T](pool: ResourcePool[S])(body: ResourcePool[S] => T) = { try { body(pool) } finally { pool.close() } } private def const[S,T](t: T): S => T = { s: S => t } private def unit[T]: T => Unit = const[T,Unit]({}) private class Worker(pool: ResourcePool[Resource], iters: Int = 100) extends Thread { override def run() = { for (i <- 0 until iters) { pool.withResource(2.seconds) { r => r.workerAcquired() r.workerReleased() } } } } private class Resource { // deliberately no mutex for accessing these, as that's the job of the pool! @volatile var numAcqs = 0L @volatile var destroyed = false @volatile var beenAcquired = false def workerAcquired() = { numAcqs = numAcqs + 1 ; beenAcquired = true } def workerReleased() = { numAcqs = numAcqs - 1 } } }
sprily/datahopper
util/src/test/scala/resourcePoolSpec.scala
Scala
gpl-3.0
2,868
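The behaviour this spec exercises (exclusive leases, a cap on simultaneously leased resources, and revalidation so a stale resource is never handed out again) can be sketched with a tiny pool built on a Semaphore and a queue. SimplePool below is illustrative only and is not the sprily ResourcePool implementation the test exercises.

import java.util.concurrent.{ConcurrentLinkedQueue, Semaphore, TimeUnit}
import scala.concurrent.duration.Duration

// Minimal pool sketch: at most maxResources concurrent leases, and idle resources are
// revalidated (and destroyed if stale) before being handed out again.
class SimplePool[R <: AnyRef](create: () => R,
                              isValid: R => Boolean,
                              destroy: R => Unit,
                              maxResources: Int) {
  private val permits = new Semaphore(maxResources)
  private val idle = new ConcurrentLinkedQueue[R]()

  def withResource[T](timeout: Duration)(body: R => T): T = {
    if (!permits.tryAcquire(timeout.toMillis, TimeUnit.MILLISECONDS))
      throw new RuntimeException("Timed out waiting for a resource")
    var resource = idle.poll()
    while (resource != null && !isValid(resource)) { // never lease a stale resource
      destroy(resource)
      resource = idle.poll()
    }
    if (resource == null) resource = create()
    try body(resource)
    finally { idle.offer(resource); permits.release() }
  }

  def close(): Unit = {
    var r = idle.poll()
    while (r != null) { destroy(r); r = idle.poll() }
  }
}

A call site then mirrors the spec: pool.withResource(2.seconds) { r => ... }, with close() destroying whatever is left idle.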
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.notify trait Notifier: def getType: String def deliver(message: Message, observer: SendingObserver): Unit def deliver(messages: Iterable[Message], observer: SendingObserver): Unit
beangle/notify
core/src/main/scala/org/beangle/notify/Notifier.scala
Scala
gpl-3.0
928
package org.sodatest.runtime.processing.formatting.xhtml /* * Copyright (c) 2011 Belmont Technology Pty Ltd. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.sodatest.runtime.data.results.SodaTestResult import java.io.{File, FileWriter, PrintWriter} import org.sodatest.runtime.processing.running.{SodaTestResultWriter, PathUtils} import org.sodatest.runtime.processing.SodaTestContext object XhtmlSodaTestResultWriter extends SodaTestResultWriter { import PathUtils._ @Override def createOutputDirectories(inputRoot: File, files: scala.List[File], outputRoot: File) { val inputDirectories = files.map(_.getParentFile).toSet for (outputDirectory <- inputDirectories.map(getOutputPath(_, inputRoot, outputRoot))) { if (!outputDirectory.exists && !outputDirectory.mkdirs) error("Failed to create output directory " + outputDirectory.getAbsolutePath) } } @Override def writeResultsFiles(filesAndResults: List[(File, SodaTestResult)], inputRoot: File, outputRoot: File)(implicit context: SodaTestContext): Unit = { for ((file, result) <- filesAndResults) { val outputFile: File = getOutputPath(file, inputRoot, outputRoot, ".html") writeResultFile(result, outputFile) } } def writeResultFile(result: SodaTestResult, outputFile: File)(implicit context: SodaTestContext) { context.log.debug("Writing result to " + outputFile.getAbsolutePath) val writer = new PrintWriter(new FileWriter(outputFile)) try { writer.println(new XhtmlFormatter().format(result)) } finally { writer.close } } }
GrahamLea/SodaTest
sodatest-runtime/src/main/scala/org/sodatest/runtime/processing/formatting/xhtml/XhtmlSodaTestResultWriter.scala
Scala
apache-2.0
2,132
package edu.berkeley.nlp.summ.data import scala.collection.JavaConverters._ import edu.berkeley.nlp.futile.fig.basic.IOUtils import edu.berkeley.nlp.futile.util.Counter import edu.berkeley.nlp.futile.util.Logger import edu.berkeley.nlp.futile.LightRunner import edu.berkeley.nlp.futile.EditDistance.EditOp import edu.berkeley.nlp.summ.RougeComputer object SummaryAligner { def alignDocAndSummary(depParseDoc: DepParseDoc, verbose: Boolean): Array[Int] = { alignDocAndSummary(depParseDoc.doc.map(_.getWords.toSeq), depParseDoc.summary.map(_.getWords.toSeq), depParseDoc.name, verbose) } def getEditDistanceWithDeletion(docSent: Seq[String], summSent: Seq[String]) = { edu.berkeley.nlp.futile.EditDistance.editDistance(docSent.map(_.toLowerCase).asJava, summSent.map(_.toLowerCase).asJava, 1.0, 0.0, 1.0, true) } def getEditDistanceOpsWithDeletion(docSent: Seq[String], summSent: Seq[String]): Array[EditOp] = { edu.berkeley.nlp.futile.EditDistance.getEditDistanceOperations(docSent.map(_.toLowerCase).asJava, summSent.map(_.toLowerCase).asJava, 1.0, 0.0, 1.0, true) } /** * Produces a one-to-many alignment between the doc and the summary (i.e. each summary * sentence is aligned to at most one document sentence). Length is the length of the * summary (so summary is the target). */ def alignDocAndSummary(docSentences: Seq[Seq[String]], summary: Seq[Seq[String]], docName: String = "", verbose: Boolean = false): Array[Int] = { val alignments = Array.tabulate(summary.size)(i => -1) var numSentsAligned = 0 for (summSentIdx <- 0 until summary.size) { var someAlignment = false; var bestAlignmentEd = Int.MaxValue var bestAlignmentChoice = -1 for (docSentIdx <- 0 until docSentences.size) { val ed = edu.berkeley.nlp.futile.EditDistance.editDistance(docSentences(docSentIdx).asJava, summary(summSentIdx).asJava, 1.0, 0.0, 1.0, true) if (ed < bestAlignmentEd) { bestAlignmentEd = ed.toInt bestAlignmentChoice = docSentIdx } } if (verbose) { Logger.logss(summSentIdx + ": best alignment choice = " + bestAlignmentChoice + ", ed = " + bestAlignmentEd) } if (bestAlignmentEd < summary(summSentIdx).size * 0.5) { someAlignment = true alignments(summSentIdx) = bestAlignmentChoice if (verbose) { Logger.logss("ALIGNED: " + summSentIdx + " -> " + bestAlignmentChoice) Logger.logss("S1: " + docSentences(bestAlignmentChoice).reduce(_ + " " + _)) Logger.logss("S2: " + summary(summSentIdx).reduce(_ + " " + _)) Logger.logss("ED: " + bestAlignmentEd) } } if (!someAlignment) { // Logger.logss("UNALIGNED: " + summSentIdx + " " + summary(summSentIdx).reduce(_ + " " + _)); } else { numSentsAligned += 1 } } if (verbose && numSentsAligned > 0) { Logger.logss(">1 alignment for " + docName) } alignments } def alignDocAndSummaryOracleRouge(depParseDoc: DepParseDoc, summSizeCutoff: Int): Array[Int] = { alignDocAndSummaryOracleRouge(depParseDoc.doc.map(_.getWords.toSeq), depParseDoc.summary.map(_.getWords.toSeq), summSizeCutoff) } def alignDocAndSummaryOracleRouge(docSentences: Seq[Seq[String]], summary: Seq[Seq[String]], summSizeCutoff: Int): Array[Int] = { val choices = Array.tabulate(summary.size)(i => { if (summary(i).size >= summSizeCutoff) { val summSent = summary(i) var bestRougeSourceIdx = -1 var bestRougeScore = 0 for (j <- 0 until docSentences.size) { var score = RougeComputer.computeRouge2SuffStats(Seq(docSentences(j)), Seq(summSent))._1 if (score > bestRougeScore) { bestRougeSourceIdx = j bestRougeScore = score } } bestRougeSourceIdx } else { -1 } }) choices } def identifySpuriousSummary(firstSentence: Seq[String]) = { val firstWords = 
firstSentence.slice(0, Math.min(10, firstSentence.size)).map(_.toLowerCase) val firstWordsNoPlurals = firstWords.map(word => if (word.endsWith("s")) word.dropRight(1) else word) firstWordsNoPlurals.contains("letter") || firstWordsNoPlurals.contains("article") || firstWordsNoPlurals.contains("column") || firstWordsNoPlurals.contains("review") || firstWordsNoPlurals.contains("interview") || firstWordsNoPlurals.contains("profile") } def identifySpuriousSentence(sentence: Seq[String]) = { val sentenceLcNoPlurals = sentence.map(_.toLowerCase).map(word => if (word.endsWith("s")) word.dropRight(1) else word) // sentenceLcNoPlurals.contains("photo") sentenceLcNoPlurals.contains("photo") || sentenceLcNoPlurals.contains("photo.") } }
gregdurrett/berkeley-doc-summarizer
src/main/scala/edu/berkeley/nlp/summ/data/SummaryAligner.scala
Scala
gpl-3.0
4,784
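SummaryAligner above aligns each summary sentence to at most one document sentence by scoring every pair with edit distance and accepting the best match only when it clears a length-relative threshold. The self-contained sketch below shows the same greedy, thresholded strategy, but substitutes a simple token-overlap score for the futile EditDistance utility; AlignmentSketch and its threshold are illustrative assumptions.

// Greedy one-to-many alignment sketch: each summary sentence maps to the index of its
// best-matching document sentence, or -1 when nothing clears the threshold.
object AlignmentSketch {
  private def overlap(docSent: Seq[String], summSent: Seq[String]): Double = {
    val docSet = docSent.map(_.toLowerCase).toSet
    val summSet = summSent.map(_.toLowerCase).toSet
    if (docSet.isEmpty || summSet.isEmpty) 0.0
    else docSet.intersect(summSet).size.toDouble / summSet.size
  }

  def align(docSentences: Seq[Seq[String]], summary: Seq[Seq[String]], threshold: Double = 0.5): Array[Int] =
    summary.map { summSent =>
      val (bestIdx, bestScore) = docSentences.zipWithIndex
        .map { case (docSent, idx) => (idx, overlap(docSent, summSent)) }
        .maxBy(_._2)
      if (bestScore >= threshold) bestIdx else -1
    }.toArray

  def main(args: Array[String]): Unit = {
    val doc = Seq(Seq("the", "cat", "sat"), Seq("dogs", "bark", "loudly"))
    val summary = Seq(Seq("cat", "sat"), Seq("birds", "sing"))
    println(align(doc, summary).mkString(", ")) // 0, -1
  }
}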
package sigmastate.utxo import com.google.common.primitives.Longs import org.ergoplatform._ import org.ergoplatform.dsl.{ContractSpec, SigmaContractSyntax, TestContractSpec} import scorex.crypto.authds.avltree.batch._ import scorex.crypto.authds.{ADKey, ADValue, SerializedAdProof} import scorex.crypto.hash.{Digest32, Blake2b256} import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ import sigmastate.eval.{CSigmaProp, IRContext} import sigmastate.eval._ import sigmastate.eval.Extensions._ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeContextTesting, ErgoLikeTestInterpreter, SigmaTestingCommons} import sigmastate.helpers.TestingHelpers._ import sigmastate.interpreter.Interpreter.ScriptNameProp import sigmastate.interpreter.ProverResult import sigmastate.lang.Terms._ import special.collection.Coll import special.sigma.{AvlTree, Context} class AVLTreeScriptsSpecification extends SigmaTestingCommons with CrossVersionProps { suite => import org.ergoplatform.dsl.AvlTreeHelpers._ lazy val spec = TestContractSpec(suite)(new TestingIRContext) lazy val prover = spec.ProvingParty("Alice") private implicit lazy val IR: IRContext = spec.IR private val reg1 = ErgoBox.nonMandatoryRegisters(0) private val reg2 = ErgoBox.nonMandatoryRegisters(1) def genKey(str: String): ADKey = ADKey @@ Blake2b256("key: " + str) def genValue(str: String): ADValue = ADValue @@ Blake2b256("val: " + str) val inKey = genKey("init key") val inValue = genValue("init value") property("avl tree - removals") { case class AvlTreeContract[Spec <: ContractSpec] (ops: Coll[Coll[Byte]], proof: Coll[Byte], prover: Spec#ProvingParty) (implicit val spec: Spec) extends SigmaContractSyntax { def pkProver = prover.pubKey import syntax._ lazy val contractEnv = Env("pkProver" -> pkProver, "ops" -> ops, "proof" -> proof) lazy val treeProp = proposition("treeProp", { ctx: Context => import ctx._ sigmaProp(SELF.R4[AvlTree].get.remove(ops, proof).get == SELF.R5[AvlTree].get) }, """{ | sigmaProp(SELF.R4[AvlTree].get.remove(ops, proof).get == SELF.R5[AvlTree].get) |} """.stripMargin) lazy val proverSig = proposition("proverSig", { _ => pkProver }, "pkProver") } val entries = (0 to 10).map { i => (genKey(i.toString) -> genValue(i.toString)) } val (tree, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, entries:_*) val removalKeys = (0 to 10).map(i => genKey(i.toString)).toArray val removals: Seq[Operation] = removalKeys.map(k => Remove(k)) removals.foreach(o => avlProver.performOneOperation(o)) val proof = avlProver.generateProof().toColl val endDigest = avlProver.digest.toColl val endTree = tree.updateDigest(endDigest) val contract = AvlTreeContract[spec.type](removalKeys.toColl, proof, prover)(spec) import contract.spec._ val mockTx = candidateBlock(0).newTransaction() val s = mockTx .outBox(20, contract.treeProp) .withRegs(reg1 -> tree, reg2 -> endTree) val spendingTx = candidateBlock(50).newTransaction().spending(s) val in1 = spendingTx.inputs(0) val res = in1.runDsl() res shouldBe CSigmaProp(TrivialProp.TrueProp) val pr = prover.prove(in1).get contract.verifier.verify(in1, pr) shouldBe true } property("avl tree - inserts") { case class AvlTreeContract[Spec <: ContractSpec] (ops: Coll[(Coll[Byte], Coll[Byte])], proof: Coll[Byte], prover: Spec#ProvingParty) (implicit val spec: Spec) extends SigmaContractSyntax { def pkProver = prover.pubKey import syntax._ lazy val contractEnv = Env("pkProver" -> pkProver, "ops" -> ops, "proof" -> proof) lazy val treeProp = 
proposition("treeProp", { ctx: Context => import ctx._ val tree = SELF.R4[AvlTree].get val endTree = SELF.R5[AvlTree].get sigmaProp(tree.insert(ops, proof).get == endTree) }, """{ | val tree = SELF.R4[AvlTree].get | val endTree = SELF.R5[AvlTree].get | sigmaProp(tree.insert(ops, proof).get == endTree) |}""".stripMargin) lazy val proverSig = proposition("proverSig", { _ => pkProver }, "pkProver") } val (tree, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed) val insertPairs = (0 to 10).map { i => (genKey(i.toString), genValue(i.toString)) }.toArray insertPairs.foreach { case (k, v) => avlProver.performOneOperation(Insert(k, v)) } val proof = avlProver.generateProof().toColl val endDigest = avlProver.digest.toColl val endTree = tree.updateDigest(endDigest) val contract = AvlTreeContract[spec.type](insertPairs.toColl, proof, prover)(spec) import contract.spec._ val mockTx = candidateBlock(0).newTransaction() val s = mockTx .outBox(20, contract.treeProp) .withRegs(reg1 -> tree, reg2 -> endTree) val spendingTx = candidateBlock(50).newTransaction().spending(s) val in1 = spendingTx.inputs(0) val res = in1.runDsl() res shouldBe CSigmaProp(TrivialProp.TrueProp) val pr = prover.prove(in1).get contract.verifier.verify(in1, pr) shouldBe true } property("avl tree lookup") { case class AvlTreeContract[Spec <: ContractSpec] (key: Coll[Byte], proof: Coll[Byte], value: Coll[Byte], prover: Spec#ProvingParty) (implicit val spec: Spec) extends SigmaContractSyntax { def pkProver = prover.pubKey import syntax._ lazy val contractEnv = Env("pkProver" -> pkProver, "key" -> key, "proof" -> proof, "value" -> value) lazy val treeProp = proposition("treeProp", { ctx: Context => import ctx._ val tree = SELF.R4[AvlTree].get sigmaProp(tree.get(key, proof).get == value) }, """{ | val tree = SELF.R4[AvlTree].get | sigmaProp(tree.get(key, proof).get == value) |}""".stripMargin) lazy val proverSig = proposition("proverSig", { _ => pkProver }, "pkProver") } val key = genKey("key") val value = genValue("value") val (_, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, key -> value, genKey("key2") -> genValue("value2")) avlProver.performOneOperation(Lookup(genKey("key"))) val digest = avlProver.digest val proof = avlProver.generateProof().toColl val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val contract = AvlTreeContract[spec.type](key.toColl, proof, value.toColl, prover)(spec) import contract.spec._ val mockTx = candidateBlock(0).newTransaction() val s = mockTx .outBox(20, contract.treeProp) .withRegs(reg1 -> treeData) val spendingTx = candidateBlock(50).newTransaction().spending(s) val in1 = spendingTx.inputs(0) val res = in1.runDsl() res shouldBe CSigmaProp(TrivialProp.TrueProp) val pr = prover.prove(in1).get contract.verifier.verify(in1, pr) shouldBe true } property("avl tree - simplest case") { val prover = new ContextEnrichingTestProvingInterpreter val verifier = new ErgoLikeTestInterpreter val pubkey = prover.dlogSecrets.head.publicImage val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) val key = genKey("hello world") avlProver.performOneOperation(Insert(key, genValue("val"))) avlProver.generateProof() avlProver.performOneOperation(Lookup(key)) val digest = avlProver.digest val proof = avlProver.generateProof() val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) val env = Map("key" -> key, "proof" -> proof) val prop = compile(env, """SELF.R4[AvlTree].get.contains(key, 
proof)""").asBoolValue.toSigmaProp val propExp = IR.builder.mkMethodCall( ExtractRegisterAs[SAvlTree.type](Self, reg1).get, SAvlTree.containsMethod, IndexedSeq(ByteArrayConstant(key), ByteArrayConstant(proof)) ).asBoolValue.toSigmaProp prop shouldBe propExp val newBox1 = testBox(10, pubkey, 0) val newBoxes = IndexedSeq(newBox1) val spendingTransaction = createTransaction(newBoxes) val s = testBox(20, TrueTree, 0, Seq(), Map(reg1 -> AvlTreeConstant(SigmaDsl.avlTree(treeData)))) val ctx = ErgoLikeContextTesting( currentHeight = 50, lastBlockUtxoRoot = AvlTreeData.dummy, minerPubkey = ErgoLikeContextTesting.dummyPubkey, boxesToSpend = IndexedSeq(s), spendingTransaction, self = s, activatedVersionInTests) val propTree = ErgoTree.fromProposition(ergoTreeHeaderInTests, prop) val pr = prover.prove(propTree, ctx, fakeMessage).get verifier.verify(propTree, ctx, pr, fakeMessage).get._1 shouldBe true } property("avl tree - contains key satisfying condition") { val elements = Seq(123, 22) val treeElements = elements.map(i => Longs.toByteArray(i)).map(s => (ADKey @@ Blake2b256(s), ADValue @@ s)) val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) treeElements.foreach(s => avlProver.performOneOperation(Insert(s._1, s._2))) avlProver.generateProof() val treeData = new AvlTreeData(avlProver.digest, AvlTreeFlags.ReadOnly, 32, None) val proofId = 0: Byte val elementId = 1: Byte val env = Map("proofId" -> proofId.toLong, "elementId" -> elementId.toLong) val prop = ErgoTree.fromProposition(ergoTreeHeaderInTests, compile(env, """{ | val tree = SELF.R4[AvlTree].get | val proof = getVar[Coll[Byte]](proofId).get | val element = getVar[Long](elementId).get | val elementKey = blake2b256(longToByteArray(element)) | element >= 120L && tree.contains(elementKey, proof) |}""".stripMargin).asBoolValue.toSigmaProp) val recipientProposition = new ContextEnrichingTestProvingInterpreter().dlogSecrets.head.publicImage val selfBox = testBox(20, TrueTree, 0, Seq(), Map(reg1 -> AvlTreeConstant(SigmaDsl.avlTree(treeData)))) val ctx = ErgoLikeContextTesting( currentHeight = 50, lastBlockUtxoRoot = AvlTreeData.dummy, minerPubkey = ErgoLikeContextTesting.dummyPubkey, boxesToSpend = IndexedSeq(selfBox), createTransaction(testBox(1, recipientProposition, 0)), self = selfBox, activatedVersionInTests) avlProver.performOneOperation(Lookup(treeElements.head._1)) val bigLeafProof = avlProver.generateProof() val prover = new ContextEnrichingTestProvingInterpreter() .withContextExtender(proofId, ByteArrayConstant(bigLeafProof)) .withContextExtender(elementId, LongConstant(elements.head)) val proof = prover.prove(prop, ctx, fakeMessage).get val verifier = new ErgoLikeTestInterpreter verifier.verify(prop, ctx, proof, fakeMessage).get._1 shouldBe true // check that verifier returns false for incorrect proofs? 
val invalidProof = SerializedAdProof @@ Array[Byte](1, 2, 3) val invalidProofResult = new ProverResult( proof = proof.proof, extension = proof.extension.add(proofId -> ByteArrayConstant(invalidProof)) ) verifier.verify(prop, ctx, invalidProofResult, fakeMessage).get._1 shouldBe false avlProver.performOneOperation(Lookup(treeElements.last._1)) val smallLeafTreeProof = avlProver.generateProof() val smallProver = new ContextEnrichingTestProvingInterpreter() .withContextExtender(proofId, ByteArrayConstant(smallLeafTreeProof)) .withContextExtender(elementId, LongConstant(elements.head)) smallProver.prove(prop, ctx, fakeMessage).isSuccess shouldBe false } property("avl tree - prover provides proof") { val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) val key = genKey("hello world") avlProver.performOneOperation(Insert(key, genValue("val"))) avlProver.generateProof() avlProver.performOneOperation(Lookup(key)) val digest = avlProver.digest val proof = avlProver.generateProof() val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val proofId = 31: Byte val prover = new ContextEnrichingTestProvingInterpreter().withContextExtender(proofId, ByteArrayConstant(proof)) val verifier = new ErgoLikeTestInterpreter val pubkey = prover.dlogSecrets.head.publicImage val env = Map("proofId" -> proofId.toLong) val prop = compile(env, """{ | val tree = SELF.R4[AvlTree].get | val key = SELF.R5[Coll[Byte]].get | val proof = getVar[Coll[Byte]](proofId).get | tree.contains(key, proof) |}""".stripMargin).asBoolValue.toSigmaProp val propTree = ErgoTree.fromProposition(ergoTreeHeaderInTests, prop) val propExp = IR.builder.mkMethodCall( ExtractRegisterAs[SAvlTree.type](Self, reg1).get, SAvlTree.containsMethod, IndexedSeq(ExtractRegisterAs[SByteArray](Self, reg2).get, GetVarByteArray(proofId).get) ).asBoolValue.toSigmaProp prop shouldBe propExp val newBox1 = testBox(10, pubkey, 0) val newBoxes = IndexedSeq(newBox1) val spendingTransaction = createTransaction(newBoxes) val s = testBox(20, TrueTree, 0, Seq(), Map(reg1 -> AvlTreeConstant(treeData), reg2 -> ByteArrayConstant(key))) val ctx = ErgoLikeContextTesting( currentHeight = 50, lastBlockUtxoRoot = AvlTreeData.dummy, minerPubkey = ErgoLikeContextTesting.dummyPubkey, boxesToSpend = IndexedSeq(s), spendingTransaction, self = s, activatedVersionInTests) val pr = prover.prove(propTree, ctx, fakeMessage).get val ctxv = ctx.withExtension(pr.extension) verifier.verify(propTree, ctxv, pr, fakeMessage).get._1 shouldBe true } property("avl tree - getMany") { val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) (1 to 10).foreach {i => val key = genKey(s"$i") avlProver.performOneOperation(Insert(key, genValue(s"${i + 100}"))) } avlProver.generateProof() (3 to 5).foreach { i => val key = genKey(s"$i") avlProver.performOneOperation(Lookup(key)) } val digest = avlProver.digest val proof = avlProver.generateProof() val proofId = 31: Byte val prover = new ContextEnrichingTestProvingInterpreter().withContextExtender(proofId, ByteArrayConstant(proof)) val verifier = new ErgoLikeTestInterpreter val pubkey = prover.dlogSecrets.head.publicImage val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val env = Map("proofId" -> proofId.toLong, "keys" -> ConcreteCollection.fromItems(genKey("3"), genKey("4"), genKey("5"))) val prop = compile(env, """{ | val tree = SELF.R4[AvlTree].get | val proof = getVar[Coll[Byte]](proofId).get | sigmaProp(tree.getMany(keys, 
proof).forall( { (o: Option[Coll[Byte]]) => o.isDefined })) |}""".stripMargin).asBoolValue.toSigmaProp val propTree = ErgoTree.fromProposition(ergoTreeHeaderInTests, prop) val newBox1 = testBox(10, pubkey, 0) val newBoxes = IndexedSeq(newBox1) val spendingTransaction = ErgoLikeTransaction(IndexedSeq(), newBoxes) val s = testBox(20, TrueTree, 0, Seq(), Map(reg1 -> AvlTreeConstant(treeData))) val ctx = ErgoLikeContextTesting( currentHeight = 50, lastBlockUtxoRoot = AvlTreeData.dummy, minerPubkey = ErgoLikeContextTesting.dummyPubkey, boxesToSpend = IndexedSeq(s), spendingTransaction, self = s, activatedVersionInTests) val pr = prover.prove(env + (ScriptNameProp -> "prove"), propTree, ctx, fakeMessage).get val ctxv = ctx.withExtension(pr.extension) verifier.verify(env + (ScriptNameProp -> "verify"), propTree, ctxv, pr, fakeMessage).get._1 shouldBe true } }
ScorexFoundation/sigmastate-interpreter
sigmastate/src/test/scala/sigmastate/utxo/AVLTreeScriptsSpecification.scala
Scala
mit
15,832
package com.realtimecep.scalatest.functionaltesting import org.openqa.selenium.WebDriver import org.openqa.selenium.htmlunit.HtmlUnitDriver import org.scalatest.selenium.WebBrowser import org.scalatest.{FlatSpec, Matchers} /** * Created on 9/23/14. * * @author <a href="[email protected]">Ted Won</a> * @version 1.0 */ class StormAdminUISpec extends FlatSpec with Matchers with WebBrowser { implicit val webDriver: WebDriver = new HtmlUnitDriver val host = "http://10.15.11.146:8088/" "The blog app home page" should "have the correct title" in { go to (host + "index.html") pageTitle should be("Storm UI") } }
jhshin9/helloworld-project
commons/src/test/scala/com/realtimecep/scalatest/functionaltesting/StormAdminUISpec.scala
Scala
apache-2.0
643
/* * Copyright 2014 Commonwealth Computer Research, Inc. * * Licensed under the Apache License, Version 2.0 (the License); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.locationtech.geomesa.core.transform import org.geotools.process.vector.TransformProcess import org.locationtech.geomesa.feature.FeatureEncoding.FeatureEncoding import org.locationtech.geomesa.feature._ import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} import scala.collection.JavaConversions._ object TransformCreator { /** * Create a function to transform a feature from one sft to another...this will * result in a new feature instance being created and encoded. * * The function returned may NOT be ThreadSafe to due the fact it contains a * SimpleFeatureEncoder instance which is not thread safe to optimize performance */ def createTransform(targetFeatureType: SimpleFeatureType, featureEncoding: FeatureEncoding, transformString: String): (SimpleFeature => Array[Byte]) = { val encoder = SimpleFeatureEncoder(targetFeatureType, featureEncoding) val defs = TransformProcess.toDefinition(transformString) featureEncoding match { case FeatureEncoding.KRYO | FeatureEncoding.AVRO => (feature: SimpleFeature) => { val newSf = new ScalaSimpleFeature(feature.getIdentifier.getID, targetFeatureType) defs.foreach { t => newSf.setAttribute(t.name, t.expression.evaluate(feature)) } encoder.encode(newSf) } case FeatureEncoding.TEXT => val builder = ScalaSimpleFeatureFactory.featureBuilder(targetFeatureType) (feature: SimpleFeature) => { defs.foreach { t => builder.set(t.name, t.expression.evaluate(feature)) } val newFeature = builder.buildFeature(feature.getID) encoder.encode(newFeature) } } } }
kevinwheeler/geomesa
geomesa-core/src/main/scala/org/locationtech/geomesa/core/transform/TransformCreator.scala
Scala
apache-2.0
2,344
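A usage sketch for the TransformCreator file above. Only the createTransform signature and the FeatureEncoding values come from that file; targetSft, the features sequence, and the "name=name;geom=geom" definition string are illustrative placeholders, not taken from the source.

import org.locationtech.geomesa.core.transform.TransformCreator
import org.locationtech.geomesa.feature.FeatureEncoding
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}

// Sketch only: `targetSft` and `features` are assumed to exist elsewhere.
def encodeTransformed(targetSft: SimpleFeatureType, features: Seq[SimpleFeature]): Seq[Array[Byte]] = {
  // Build the closure once per thread: it captures a SimpleFeatureEncoder that is not thread safe.
  val encode: SimpleFeature => Array[Byte] =
    TransformCreator.createTransform(targetSft, FeatureEncoding.AVRO, "name=name;geom=geom")
  features.map(encode)
}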
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.bwsw.sj.engine.core.engine import com.typesafe.scalalogging.Logger /** * Provides methods for a basic execution logic of task engine * that has a checkpoint based on the number of messages (envelopes) [[com.bwsw.sj.common.utils.EngineLiterals.everyNthMode]] */ trait NumericalCheckpointTaskEngine { private val logger = Logger(this.getClass) private var countOfEnvelopes = 0 protected val checkpointInterval: Long private val isNotOnlyCustomCheckpoint = checkpointInterval > 0 def isItTimeToCheckpoint(isCheckpointInitiated: Boolean): Boolean = { isNotOnlyCustomCheckpoint && countOfEnvelopes == checkpointInterval || isCheckpointInitiated } def afterReceivingEnvelope(): Unit = { increaseCounter() } private def increaseCounter(): Unit = { countOfEnvelopes += 1 logger.debug(s"Increase count of envelopes to: $countOfEnvelopes.") } def prepareForNextCheckpoint(): Unit = { resetCounter() } private def resetCounter(): Unit = { logger.debug(s"Reset a counter of envelopes to 0.") countOfEnvelopes = 0 } }
bwsw/sj-platform
core/sj-engine-core/src/main/scala/com/bwsw/sj/engine/core/engine/NumericalCheckpointTaskEngine.scala
Scala
apache-2.0
1,895
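A minimal sketch of driving the NumericalCheckpointTaskEngine trait above. The ConstantIntervalEngine class, the demo object and the interval of 3 are illustrative and not part of sj-platform; only the trait's members are taken from the file.

import com.bwsw.sj.engine.core.engine.NumericalCheckpointTaskEngine

// Illustrative subclass: the interval is supplied as a constructor val so it is already set
// when the trait's own fields (such as isNotOnlyCustomCheckpoint) are initialised.
class ConstantIntervalEngine(override protected val checkpointInterval: Long)
  extends NumericalCheckpointTaskEngine

object CheckpointDemo extends App {
  val engine = new ConstantIntervalEngine(3)
  (1 to 3).foreach(_ => engine.afterReceivingEnvelope())
  println(engine.isItTimeToCheckpoint(isCheckpointInitiated = false)) // true: the counter reached the interval
  engine.prepareForNextCheckpoint()                                   // counter goes back to 0
  println(engine.isItTimeToCheckpoint(isCheckpointInitiated = false)) // false until three more envelopes arrive
}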
// Ensure we don't get "the type test for argType cannot be checked at runtime" warning class Symbol { type ThisName } type TermSymbol = Symbol { type ThisName = String } type TermSymbolOrNull = TermSymbol | Null def testSimple = val x: Symbol | Null = ??? x match case key: Symbol => 1 case null => 0 def testWithRefinedType = val x: TermSymbol | Null = ??? x match case key: TermSymbol => 1 case null => 0 def testWithAlias = val x: TermSymbolOrNull = ??? x match case key: TermSymbol => 1 case null => 0
dotty-staging/dotty
tests/explicit-nulls/pos-patmat/match-pat.scala
Scala
apache-2.0
551
package io.atal.butterfly.action import io.atal.butterfly.{Editor, Clipboard, Cursor} import org.scalatest._ import Matchers._ /** AddCursor action unit test */ class AddCursorTest extends FlatSpec { "The AddCursor action" should "add a cursor to the editor" in { val action = new AddCursor((1, 1)) val editor = new Editor() val clipboard = new Clipboard() action.execute(editor, clipboard) editor.cursors should have length 2 editor.cursors should contain (new Cursor((1, 1))) } }
Matthieu-Riou/Butterfly
src/test/scala/io/atal/butterfly/action/AddCursorTest.scala
Scala
mit
517
package bad.robot.radiate import java.io.File import org.apache.log4j._ object Logging { def initialise() { val root = Logger.getRootLogger root.addAppender(createFileAppender) root.addAppender(createConsoleAppender) } private def createFileAppender: FileAppender = { val file = new RollingFileAppender file.setName("File Appender") file.setMaxBackupIndex(1) file.setMaxFileSize("2MB") file.setFile(System.getProperty("user.home") + File.separator + ".radiate" + File.separator + "radiate.log") file.setLayout(new PatternLayout("%d %-5p [%c{1}] %m%n")) file.setThreshold(Level.INFO) file.setAppend(true) file.activateOptions() file } private def createConsoleAppender: ConsoleAppender = { val console = new ConsoleAppender console.setLayout(new PatternLayout("%d [%p|%c{1}] %m%n")) console.setThreshold(Level.INFO) console.activateOptions() console } }
tobyweston/radiate
src/main/scala/bad/robot/radiate/Logging.scala
Scala
apache-2.0
943
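A short sketch of wiring the Logging object above into an application at startup; the Main object is illustrative.

import bad.robot.radiate.Logging
import org.apache.log4j.Logger

object Main extends App {
  Logging.initialise()
  // Goes to both the console appender and ~/.radiate/radiate.log set up above.
  Logger.getLogger(getClass).info("radiate started")
}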
package com.seanshubin.concurrency.samples.actor import com.seanshubin.concurrency.samples.domain.{Cleanup, Event} class CleanupActorSystems(eventActorSystem: ActorSystemContract[Event]) extends Cleanup { override def cleanup(): Unit = { eventActorSystem.terminate() } }
SeanShubin/concurrency-samples
actor/src/main/scala/com/seanshubin/concurrency/samples/actor/CleanupActorSystems.scala
Scala
unlicense
281
package slaq.meta import slaq.ResultSetInvoker import slaq.ql.TypeMapperDelegate /** * A wrapper for a row in the ResultSet returned by DatabaseMetaData.getUDTs(). */ case class MUDT( typeName: MQName, className: String, sqlType: Int, remarks: String, baseType: Option[Short] ) { def sqlTypeName = TypeMapperDelegate.typeNames.get(sqlType) def getAttributes(attributeNamePattern: String = "%") = MAttribute.getAttributes(typeName, attributeNamePattern) } object MUDT { def getUDTs(typeNamePattern: MQName, types: Option[Seq[Int]] = None) = ResultSetInvoker[MUDT]( _.metaData.getUDTs(typeNamePattern.catalog_?, typeNamePattern.schema_?, typeNamePattern.name, types.map(_.toArray) getOrElse (null)) ) { r => MUDT(MQName.from(r), r<<, r<<, r<<, r<<) } }
godenji/slaq
src/main/scala/slaq/scalaquery/meta/MUDT.scala
Scala
bsd-2-clause
808
package org.higherstate.jameson.validators import org.higherstate.jameson.Path import org.higherstate.jameson.failures.{ValidationFailure, InvalidValueFailure} trait LengthValidator extends Validator { def apply(value:Any, path:Path) = value match { case n:Seq[_] => validate(n.length, path) case n:String => validate(n.length, path) case v => Some(InvalidValueFailure(this, "Value does not have a length", v, path)) } protected def validate(value:Int, path:Path):Option[ValidationFailure] } case class MinLength(compare:Int) extends LengthValidator { protected def validate(value:Int, path:Path):Option[ValidationFailure] = if (value < compare) Some(InvalidValueFailure(this, s"Expected value length to be greater than $compare", value, path)) else None def schema = Map("minItems" -> compare) } case class MaxLength(compare:Int) extends LengthValidator { protected def validate(value:Int, path:Path):Option[ValidationFailure] = if (value > compare) Some(InvalidValueFailure(this, s"Expected value length to be less than $compare", value, path)) else None def schema = Map("maxItems" -> compare) }
HigherState/jameson
src/main/scala/org/higherstate/jameson/validators/LengthValidator.scala
Scala
apache-2.0
1,169
package controllers import chess.format.Forsyth import chess.format.Forsyth.SituationPlus import chess.Situation import chess.variant.{ Variant, Standard, FromPosition } import play.api.i18n.Messages.Implicits._ import play.api.libs.json.Json import play.api.mvc._ import play.api.Play.current import scala.concurrent.duration._ import lila.app._ import lila.game.{ GameRepo, Pov } import lila.round.Forecast.{ forecastStepJsonFormat, forecastJsonWriter } import views._ object UserAnalysis extends LilaController with TheftPrevention { def index = load("", Standard) def parse(arg: String) = arg.split("/", 2) match { case Array(key) => load("", Variant orDefault key) case Array(key, fen) => Variant.byKey get key match { case Some(variant) => load(fen, variant) case _ if fen == Standard.initialFen => load(arg, Standard) case _ => load(arg, FromPosition) } case _ => load("", Standard) } def load(urlFen: String, variant: Variant) = Open { implicit ctx => val fenStr = Some(urlFen.trim.replace("_", " ")).filter(_.nonEmpty) orElse get("fen") val decodedFen = fenStr.map { java.net.URLDecoder.decode(_, "UTF-8").trim } val pov = makePov(decodedFen, variant) val orientation = get("color").flatMap(chess.Color.apply) | pov.color Env.api.roundApi.userAnalysisJson(pov, ctx.pref, decodedFen, orientation, owner = false) map { data => Ok(html.board.userAnalysis(data, pov)) } } private lazy val keyboardI18nKeys = { val trans = Env.i18n.keys Seq( trans.keyboardShortcuts, trans.keyMoveBackwardOrForward, trans.keyGoToStartOrEnd, trans.keyShowOrHideComments, trans.keyEnterOrExitVariation, trans.youCanAlsoScrollOverTheBoardToMoveInTheGame, trans.pressShiftPlusClickOrRightClickToDrawCirclesAndArrowsOnTheBoard) } def keyboardI18n = Action.async { implicit req => JsonOk(fuccess(Env.i18n.jsDump.keysToObject(keyboardI18nKeys, lang))) } private[controllers] def makePov(fen: Option[String], variant: Variant): Pov = makePov { fen.filter(_.nonEmpty).flatMap { Forsyth.<<<@(variant, _) } | SituationPlus(Situation(variant), 1) } private[controllers] def makePov(from: SituationPlus): Pov = Pov( lila.game.Game.make( game = chess.Game( board = from.situation.board, player = from.situation.color, turns = from.turns), whitePlayer = lila.game.Player.white, blackPlayer = lila.game.Player.black, mode = chess.Mode.Casual, variant = from.situation.board.variant, source = lila.game.Source.Api, pgnImport = None).copy(id = "synthetic"), from.situation.color) def game(id: String, color: String) = Open { implicit ctx => OptionFuResult(GameRepo game id) { game => GameRepo initialFen game.id flatMap { initialFen => val pov = Pov(game, chess.Color(color == "white")) Env.api.roundApi.userAnalysisJson(pov, ctx.pref, initialFen, pov.color, owner = isMyPov(pov)) map { data => Ok(html.board.userAnalysis(data, pov)) } } map NoCache } } // XHR only def pgn = OpenBody { implicit ctx => implicit val req = ctx.body Env.importer.forms.importForm.bindFromRequest.fold( failure => BadRequest(errorsAsJson(failure)).fuccess, data => Env.importer.importer.inMemory(data).fold( err => BadRequest(jsonError(err.shows)).fuccess, { case (game, fen) => val pov = Pov(game, chess.White) Env.api.roundApi.userAnalysisJson(pov, ctx.pref, initialFen = fen.map(_.value), pov.color, owner = false) map { data => Ok(data) } }) ).map(_ as JSON) } def forecasts(fullId: String) = AuthBody(BodyParsers.parse.json) { implicit ctx => me => import lila.round.Forecast OptionFuResult(GameRepo pov fullId) { pov => if (isTheft(pov)) fuccess(theftResponse) else 
ctx.body.body.validate[Forecast.Steps].fold( err => BadRequest(err.toString).fuccess, forecasts => Env.round.forecastApi.save(pov, forecasts) >> Env.round.forecastApi.loadForDisplay(pov) map { case None => Ok(Json.obj("none" -> true)) case Some(fc) => Ok(Json toJson fc) as JSON } recover { case Forecast.OutOfSync => Ok(Json.obj("reload" -> true)) }) } } def forecastsOnMyTurn(fullId: String, uci: String) = AuthBody(BodyParsers.parse.json) { implicit ctx => me => import lila.round.Forecast OptionFuResult(GameRepo pov fullId) { pov => if (isTheft(pov)) fuccess(theftResponse) else { ctx.body.body.validate[Forecast.Steps].fold( err => BadRequest(err.toString).fuccess, forecasts => { def wait = 50 + (Forecast maxPlies forecasts min 10) * 50 Env.round.forecastApi.playAndSave(pov, uci, forecasts) >> Env.current.scheduler.after(wait.millis) { Ok(Json.obj("reload" -> true)) } } ) } } } }
clarkerubber/lila
app/controllers/UserAnalysis.scala
Scala
agpl-3.0
5,196
abstract class foo(a: Int, b: Int) extends scala.DelayedInit { def delayedInit(x: => Unit) { println("delayed init"); x } } object Test { def main(args: Array[String]) { new foo(1, 2) { println("new foo(1, 2)") } new foo(b = 2, a = 1) { println("new foo(b = 2, a = 1)") } } }
felixmulder/scala
test/files/run/t6481.scala
Scala
bsd-3-clause
301
import language.higherKinds trait Travers[T[_]] extends Functor[T] with Foldable[T] { def traverse[F[_]:Applic,A,B](t: T[A])(f: A => F[B]): F[T[B]] override def map[A,B](functor: T[A])(g: A => B): T[B] = traverse(functor)((a:A) => Applic.ApplicId.pure(g(a)))(Applic.ApplicId).id override def foldMap[A,B:Monoid](foldable: T[A])(f: A => B): B = { case class Const[B,A](unConst: B) type ConstB[X]=Const[B,X] val monoid = implicitly[Monoid[B]] implicit val constApplic = new Applic[ConstB] { override def pure[A](a: A): Const[B,A] = Const(monoid.mzero) override def ap[A,C](b: Const[B,A])(f: Const[B,A => C]): Const[B,C] = Const(monoid.madd(b.unConst,f.unConst)) } traverse[ConstB,A,B](foldable)(a => Const[B,B](f(a))).unConst } } object Travers { def traverse[T[_]:Travers,F[_]:Applic,A,B](t: T[A])(f: A => F[B]): F[T[B]] = implicitly[Travers[T]].traverse(t)(f) def sequence[T[_]:Travers,F[_]:Applic,A](t: T[F[A]]): F[T[A]] = implicitly[Travers[T]].traverse(t)(identity) implicit object TraversId extends Travers[Id] { override def traverse[F[_],A,B](t: Id[A])(f: A => F[B])(implicit applic: Applic[F]): F[Id[B]] = applic.ap(f(t.id))(applic.pure( (b:B) => Id(b) )) } implicit object TraversOption extends Travers[Option] { override def traverse[F[_],A,B](t: Option[A])(f: A => F[B])(implicit applic: Applic[F]): F[Option[B]] = t match { case None => applic.pure(None) case Some(x) => applic.ap(f(x))(applic.pure( (b:B) => Some(b) )) } } implicit object TraversList extends Travers[List] { override def traverse[F[_],A,B](t: List[A])(f: A => F[B])(implicit applic: Applic[F]): F[List[B]] = t match { case Nil => applic.pure(Nil) case x :: xs => applic.ap(traverse(xs)(f))(applic.ap(f(x))(applic.pure( (b:B) => (bs:List[B]) => b::bs ))) } } implicit object TraversTree extends Travers[Tree] { override def traverse[F[_],A,B](t: Tree[A])(f: A => F[B])(implicit applic: Applic[F]): F[Tree[B]] = t match { case EmptyTree => applic.pure(EmptyTree) case Node(left, elem, right) => applic.ap(traverse(left)(f))(applic.ap(f(elem))(applic.ap(traverse(right)(f))(applic.pure( (r:Tree[B]) => (e:B) => (l:Tree[B]) => Node(l,e,r) )))) } } }
lkuczera/scalatypeclasses
solutions/Travers.scala
Scala
mit
2,296
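The Travers file above only shows Applic.ApplicId and the private Const applicative used inside foldMap, so the sketch below assumes a plain Applic[Option] instance (not shown in the file, and possibly different from whatever the project defines) in order to demonstrate sequence. It assumes the Applic and Travers definitions from the file are in scope.

// Assumed/illustrative instance; pure and ap follow the signatures visible in the Const applicative above.
implicit val applicOption: Applic[Option] = new Applic[Option] {
  override def pure[A](a: A): Option[A] = Some(a)
  override def ap[A, B](fa: Option[A])(ff: Option[A => B]): Option[B] =
    for { a <- fa; f <- ff } yield f(a)
}

// Collect a list of optional values into an optional list.
Travers.sequence(List(Option(1), Option(2), Option(3))) // Some(List(1, 2, 3))
Travers.sequence(List(Option(1), None))                 // None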
/* * Copyright University of Basel, Graphics and Vision Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scalismo.faces.render import scalismo.color.ColorSpaceOperations import scalismo.faces.image.{InterpolatedPixelImage, PixelImage, PixelImageDomain} import scalismo.faces.mesh.TextureMappedProperty import scalismo.geometry.{Point, _2D, _3D} import scalismo.mesh._ import scala.util.Try /** methods to extract texture from images when rendering a mesh */ object TextureExtraction { /** * Texture Extraction from Image. * 1. Find correspondence between mesh and target image * Render Points on image plane according to pointShader * Put the 2D coordinates of the image onto the mesh. This is the texture mapping. */ def imageAsSurfaceProperty[Pixel](mesh: TriangleMesh3D, pointShader: PointShader, targetImage: PixelImage[Pixel])(implicit ops: ColorSpaceOperations[Pixel]): MeshSurfaceProperty[Option[Pixel]] = { val visible: MeshSurfaceProperty[Boolean] = TriangleRenderer.visibilityAsSurfaceProperty(mesh, pointShader, targetImage.domain, 1e-3, boundaryAlwaysVisible = false) new MeshSurfaceProperty[Option[Pixel]] { val target: InterpolatedPixelImage[Pixel] = targetImage.interpolate override def onSurface(triangleId: TriangleId, bcc: BarycentricCoordinates): Option[Pixel] = { val vis: Boolean = visible(triangleId, bcc) if (vis) { val imagePoint: Point[_3D] = TriangleRenderer.transformPoint(mesh.position(triangleId, bcc), pointShader, targetImage.domain) Try(target(imagePoint.x + 0.5, imagePoint.y + 0.5)).toOption // interpolated image access is shifted by 0.5/0.5 } else None } override def triangulation: TriangleList = mesh.triangulation } } /** * Texture Extraction from Image. * 1. Find correspondence between mesh and target image. * Render Points on image plane according to pointShader. * Put the 2D coordinates of the image onto the mesh. This is the texture mapping. 
*/ def imageAsTexture[Pixel](mesh: TriangleMesh3D, pointShader: PointShader, targetImage: PixelImage[Pixel])(implicit ops: ColorSpaceOperations[Pixel]): TextureMappedProperty[Pixel] = { // get canonical mapping: projected points in image (image itself becomes texture) val projectedPoints: IndexedSeq[Point[_2D]] = mesh.pointSet.points.map(p => { val p2d = TriangleRenderer.transformPoint(p, pointShader, targetImage.domain) Point(p2d.x, p2d.y) }).toIndexedSeq val projectedUVPoints = projectedPoints.map(p => TextureMappedProperty.imageCoordinatesToUV(p, targetImage.width, targetImage.height)) val imageTextureCoordinates: SurfacePointProperty[Point[_2D]] = SurfacePointProperty(mesh.triangulation, projectedUVPoints) TextureMappedProperty(mesh.triangulation, imageTextureCoordinates, targetImage) } /** full texture extraction: texture image extracted through a mesh rendered to an image, image "pulled back" from image to texture domain using texture mapping */ def extractTextureAsImage[A](mesh: TriangleMesh3D, pointShader: PointShader, targetImage: PixelImage[A], textureDomain: PixelImageDomain, textureMap: MeshSurfaceProperty[Point[_2D]])(implicit ops: ColorSpaceOperations[A]): PixelImage[Option[A]] = { val imgOnSurface = imageAsSurfaceProperty(mesh, pointShader, targetImage) val texturedSurface: TextureMappedProperty[Option[A]] = TextureMappedProperty.fromSurfaceProperty(imgOnSurface, textureMap, textureDomain, None) texturedSurface.texture } }
unibas-gravis/scalismo-faces
src/main/scala/scalismo/faces/render/TextureExtraction.scala
Scala
apache-2.0
4,352
package pamflet case class Frontin(header: Option[String], body: String) object Frontin { val seperator = "---" def seperates(str: String): Boolean = (str.trim == seperator) && (str startsWith seperator) def apply(str: String): Frontin = str.linesWithSeparators.toList match { case Nil => Frontin(None, "") case x :: xs if seperates(x) => xs span {!seperates(_)} match { case (h, b) => Frontin(Some(h.mkString("")), if (b isEmpty) "" else b.tail.mkString("")) } case _ => Frontin(None, str) } }
n8han/pamflet
library/src/main/scala/frontin.scala
Scala
lgpl-3.0
575
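A worked example of the front-matter splitting rule implemented by Frontin above; the input strings are illustrative.

import pamflet.Frontin

// A leading "---" fence starts front matter; the next "---" line ends it.
Frontin("---\nlayout: page\n---\nHello, world.")
// => Frontin(Some("layout: page\n"), "Hello, world.")

// No leading fence: everything is body.
Frontin("No front matter here.")
// => Frontin(None, "No front matter here.")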
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.usergrid.simulations import io.gatling.core.Predef._ import io.gatling.core.structure.ScenarioBuilder import io.gatling.http.config.HttpProtocolBuilder import org.apache.usergrid.enums.ScenarioType import org.apache.usergrid.helpers.Setup import org.apache.usergrid.scenarios.{AuditScenarios, EntityCollectionScenarios} import org.apache.usergrid.settings.Settings /** * Audit simulations. * */ class AuditSimulation extends Simulation { def getScenario(scenarioType: String): ScenarioBuilder = { scenarioType match { case ScenarioType.AuditGetCollectionEntities => AuditScenarios.getAllCollections case ScenarioType.AuditVerifyCollectionEntities => AuditScenarios.verifyAuditedEntities case ScenarioType.AuditDeleteEntities => AuditScenarios.deleteAuditedEntities } } before{ Settings.setTestStartTime() } if (ScenarioType.isValid(Settings.scenarioType)) { val scenario: ScenarioBuilder = getScenario(Settings.scenarioType) val httpConf: HttpProtocolBuilder = Settings.httpOrgConf .acceptHeader("application/json") setUp( scenario .inject( rampUsers(Settings.rampUsers) over Settings.rampTime ).protocols(httpConf) ) } else { println(s"Audit scenario type ${Settings.scenarioType} not found.") } after { endHandler } def endHandler: Unit = { Settings.setTestEndTime() if (Settings.captureAuditUuids) { val uuidDesc = Settings.scenarioType match { case ScenarioType.AuditGetCollectionEntities => "found" case ScenarioType.AuditVerifyCollectionEntities => "failed" case ScenarioType.AuditDeleteEntities => "failed" } Settings.writeAuditUuidsToFile(uuidDesc) } Settings.printSettingsSummary(true) Settings.printAuditResults() } def abortedEarly: Unit = { println(">>>>>>>>>>>>AUDIT ABORTED") endHandler } sys addShutdownHook abortedEarly }
mdunker/usergrid
tests/performance/src/main/scala/org/apache/usergrid/simulations/AuditSimulation.scala
Scala
apache-2.0
2,765
package componentswing import componentwork._, javax.swing.{JFrame, JPanel}, java.awt.{BorderLayout, Component => SC,Color}, java.awt.event.{WindowAdapter,WindowEvent, ComponentAdapter, ComponentEvent, KeyListener, KeyEvent}, scala.collection.mutable.{Map => MMap} class Frame(val name:MMap[String, Component]) extends JFrame with Component{ //Interface "IFrame" val IFrameEx:IFrameFR = new IFrameFR; var IFrameIm:IFrameS = null interfaces += ("IFrame" -> (IFrameEx,"IFrameF","IFrameS", (c:Component, i:Interface) => {IFrameIm = i.asInstanceOf[IFrameS]}, (c:Component) => {IFrameIm = null; false}, false)) //Multiinterface "IWidgets" val IWidgetsEx:IWidgetFR = new IWidgetFR; var IWidgetsIm = MMap[Component, IWidgetS]() interfaces += ("IWidgets" -> (IWidgetsEx,"IWidgetF","IWidgetS", (c:Component, i:Interface) => {IWidgetsIm += (c -> i.asInstanceOf[IWidgetS])}, (c:Component) => {IWidgetsIm -= c; false}, true)) //Interfaces export realization class IFrameFR extends IFrameF { override def setTitle(s:String) = {thisComponent.asInstanceOf[Frame].setTitle(s)} override def show () = {thisComponent.asInstanceOf[Frame].show()} } class IWidgetFR extends IWidgetF { override def connection(c:Component) = { panel.add(IWidgetsIm(c).component, IWidgetsIm(c).position) pack() } override def disconnection(c:Component) = { panel.remove(IWidgetsIm(c).component); pack() } override def pack () = {thisComponent.asInstanceOf[Frame].pack()} } //Self-assembly setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE) private val panel = new JPanel panel setLayout new BorderLayout add(panel) //Listeners addWindowListener(new WindowAdapter{ override def windowClosing(e:WindowEvent) = {if(IFrameIm != null){IFrameIm.closing}} }) addKeyListener(new KeyListener{ def keyPressed(e:KeyEvent) = {if(IFrameIm != null){IFrameIm.keyPressed(e.getKeyCode())}} def keyReleased(e:KeyEvent) = {if(IFrameIm != null){IFrameIm.keyReleased(e.getKeyCode())}} def keyTyped(e:KeyEvent) = {} }) //Service code construction() }
AlexCAB/whfzf
componentswing/Frame.scala
Scala
mit
2,156
/* * Copyright (c) 2014-2021 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.tail.batches /** [[Batch]] implementation specialized for `Double`. * * Under the hood it uses an [[monix.tail.batches.ArrayBatch ArrayBatch]] * implementation, which is `@specialized`. Using `DoublesBatch` * might be desirable instead for `isInstanceOf` checks. */ final class DoublesBatch(underlying: ArrayBatch[Double]) extends Batch[Double] { override def cursor(): DoublesCursor = new DoublesCursor(underlying.cursor()) override def take(n: Int): DoublesBatch = new DoublesBatch(underlying.take(n)) override def drop(n: Int): DoublesBatch = new DoublesBatch(underlying.drop(n)) override def slice(from: Int, until: Int): DoublesBatch = new DoublesBatch(underlying.slice(from, until)) override def filter(p: (Double) => Boolean): DoublesBatch = new DoublesBatch(underlying.filter(p)) override def map[B](f: (Double) => B): ArrayBatch[B] = underlying.map(f) override def collect[B](pf: PartialFunction[Double, B]): ArrayBatch[B] = underlying.collect(pf) override def foldLeft[R](initial: R)(op: (R, Double) => R): R = underlying.foldLeft(initial)(op) }
monixio/monix
monix-tail/shared/src/main/scala/monix/tail/batches/DoublesBatch.scala
Scala
apache-2.0
1,808
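The scaladoc above motivates DoublesBatch by cheaper isInstanceOf checks; a small hypothetical helper illustrating that check:

import monix.tail.batches.{Batch, DoublesBatch}

// Hypothetical helper: the dedicated subclass makes the runtime check explicit.
def isPrimitiveDoubleBatch(batch: Batch[Double]): Boolean =
  batch match {
    case _: DoublesBatch => true  // backed by the specialized ArrayBatch[Double]
    case _               => false
  }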
package chapter2

object Exercise2_4 {

  /**
   * Exercise 2.4: implement `uncurry`, which reverses currying by converting a
   * function of type `A => B => C` into a two-argument function `(A, B) => C`.
   */
  def uncurry[A, B, C](f : A => B => C): (A, B) => C = (x, y) => f(x)(y)

  def main(args: Array[String]): Unit = {
    def curriedAdd(a:Int)(b: Int) = a + b
    val unCurriedAdd = uncurry(curriedAdd)
    assert(unCurriedAdd(2, 3) == 5)
    println("Tests Successful")
  }
}
amolnayak311/functional-programming-in-scala
src/chapter2/Exercise2_4.scala
Scala
unlicense
336
/* * Copyright 2014–2020 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.contrib.scalaz import slamdata.Predef.{Int, Unit} import org.specs2.mutable.Specification import scalaz.concurrent.Task class MoreBindOpsSpec extends Specification { import bind._ "MoreBindOps" >> { "<<" >> { var state = true val t1: Task[Int] = Task.delay{state = true; 7} val t2: Task[Unit] = Task.delay{state = false} ((t1 << t2).unsafePerformSync must_=== 7) and (state must_=== false) } } }
slamdata/quasar
foundation/src/test/scala/quasar/contrib/scalaz/MoreBindOpsSpec.scala
Scala
apache-2.0
1,058
package aug.profile object DefaultColorSchemes { val colorSchemes: List[ColorSchemeConfig] = { val list = List.newBuilder[ColorSchemeConfig] list += ColorSchemeConfig("default") list.result } }
austinmiller/augustmc
src/main/scala/aug/profile/DefaultColorSchemes.scala
Scala
apache-2.0
215
/******************************************************************************* Copyright 2009,2011, Oracle and/or its affiliates. All rights reserved. Use is subject to license terms. This distribution may include materials developed by third parties. ******************************************************************************/ package kr.ac.kaist.jsaf.scala_src.useful import _root_.java.util.{Iterator => JIterator} import _root_.java.lang.{Iterable => JIterable} import scala.collection.JavaConversions object Iterators { implicit def wrapIterator[T](iter: JIterator[T]): Iterator[T] = JavaConversions.asScalaIterator(iter) implicit def wrapIterable[T](iter: JIterable[T]): Iterator[T] = JavaConversions.asScalaIterator(iter.iterator) }
darkrsw/safe
src/main/scala/kr/ac/kaist/jsaf/scala_src/useful/Iterators.scala
Scala
bsd-3-clause
780
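A small sketch of the implicit conversions above in use; the values are illustrative.

import kr.ac.kaist.jsaf.scala_src.useful.Iterators._
import java.util.{Arrays => JArrays}

val names: java.lang.Iterable[String] = JArrays.asList("safe", "jsaf")
val upperCased = names.map(_.toUpperCase).toList // wrapIterable supplies the Scala Iterator API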
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.admin import kafka.utils.Exit import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api.{AfterEach, BeforeEach, Test, Timeout} @Timeout(60) class ReassignPartitionsCommandArgsTest { @BeforeEach def setUp(): Unit = { Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull)) } @AfterEach def tearDown(): Unit = { Exit.resetExitProcedure() } ///// Test valid argument parsing @Test def shouldCorrectlyParseValidMinimumGenerateOptions(): Unit = { val args = Array( "--bootstrap-server", "localhost:1234", "--generate", "--broker-list", "101,102", "--topics-to-move-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldCorrectlyParseValidMinimumExecuteOptions(): Unit = { val args = Array( "--bootstrap-server", "localhost:1234", "--execute", "--reassignment-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldCorrectlyParseValidMinimumLegacyExecuteOptions(): Unit = { val args = Array( "--zookeeper", "localhost:1234", "--execute", "--reassignment-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldCorrectlyParseValidMinimumVerifyOptions(): Unit = { val args = Array( "--bootstrap-server", "localhost:1234", "--verify", "--reassignment-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldCorrectlyParseValidMinimumLegacyVerifyOptions(): Unit = { val args = Array( "--zookeeper", "localhost:1234", "--verify", "--reassignment-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldAllowThrottleOptionOnExecute(): Unit = { val args = Array( "--bootstrap-server", "localhost:1234", "--execute", "--throttle", "100", "--reassignment-json-file", "myfile.json") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def shouldUseDefaultsIfEnabled(): Unit = { val args = Array( "--bootstrap-server", "localhost:1234", "--execute", "--reassignment-json-file", "myfile.json") val opts = ReassignPartitionsCommand.validateAndParseArgs(args) assertEquals(10000L, opts.options.valueOf(opts.timeoutOpt)) assertEquals(-1L, opts.options.valueOf(opts.interBrokerThrottleOpt)) } @Test def testList(): Unit = { val args = Array( "--list", "--bootstrap-server", "localhost:1234") ReassignPartitionsCommand.validateAndParseArgs(args) } @Test def testCancelWithPreserveThrottlesOption(): Unit = { val args = Array( "--cancel", "--bootstrap-server", "localhost:1234", "--reassignment-json-file", "myfile.json", "--preserve-throttles") ReassignPartitionsCommand.validateAndParseArgs(args) } ///// Test handling missing or invalid actions @Test def shouldFailIfNoArgs(): Unit = { val args: Array[String]= 
Array()
    shouldFailWith(ReassignPartitionsCommand.helpText, args)
  }

  @Test
  def shouldFailIfBlankArg(): Unit = {
    val args = Array(" ")
    shouldFailWith("Command must include exactly one action", args)
  }

  @Test
  def shouldFailIfMultipleActions(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--execute",
      "--verify",
      "--reassignment-json-file", "myfile.json"
    )
    shouldFailWith("Command must include exactly one action", args)
  }

  ///// Test --execute
  @Test
  def shouldNotAllowExecuteWithTopicsOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--execute",
      "--reassignment-json-file", "myfile.json",
      "--topics-to-move-json-file", "myfile.json")
    shouldFailWith("Option \"[topics-to-move-json-file]\" can't be used with action \"[execute]\"", args)
  }

  @Test
  def shouldNotAllowExecuteWithBrokerList(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--execute",
      "--reassignment-json-file", "myfile.json",
      "--broker-list", "101,102"
    )
    shouldFailWith("Option \"[broker-list]\" can't be used with action \"[execute]\"", args)
  }

  @Test
  def shouldNotAllowExecuteWithoutReassignmentOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--execute")
    shouldFailWith("Missing required argument \"[reassignment-json-file]\"", args)
  }

  @Test
  def testMissingBootstrapServerArgumentForExecute(): Unit = {
    val args = Array(
      "--execute")
    shouldFailWith("Please specify --bootstrap-server", args)
  }

  ///// Test --generate
  @Test
  def shouldNotAllowGenerateWithoutBrokersAndTopicsOptions(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--generate")
    shouldFailWith("Missing required argument \"[topics-to-move-json-file]\"", args)
  }

  @Test
  def shouldNotAllowGenerateWithoutBrokersOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--topics-to-move-json-file", "myfile.json",
      "--generate")
    shouldFailWith("Missing required argument \"[broker-list]\"", args)
  }

  @Test
  def shouldNotAllowGenerateWithoutTopicsOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--broker-list", "101,102",
      "--generate")
    shouldFailWith("Missing required argument \"[topics-to-move-json-file]\"", args)
  }

  @Test
  def shouldNotAllowGenerateWithThrottleOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--generate",
      "--broker-list", "101,102",
      "--throttle", "100",
      "--topics-to-move-json-file", "myfile.json")
    shouldFailWith("Option \"[throttle]\" can't be used with action \"[generate]\"", args)
  }

  @Test
  def shouldNotAllowGenerateWithReassignmentOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--generate",
      "--broker-list", "101,102",
      "--topics-to-move-json-file", "myfile.json",
      "--reassignment-json-file", "myfile.json")
    shouldFailWith("Option \"[reassignment-json-file]\" can't be used with action \"[generate]\"", args)
  }

  @Test
  def testInvalidCommandConfigArgumentForLegacyGenerate(): Unit = {
    val args = Array(
      "--zookeeper", "localhost:1234",
      "--generate",
      "--broker-list", "101,102",
      "--topics-to-move-json-file", "myfile.json",
      "--command-config", "/tmp/command-config.properties"
    )
    shouldFailWith("You must specify --bootstrap-server when using \"[command-config]\"", args)
  }

  ///// Test --verify
  @Test
  def shouldNotAllowVerifyWithoutReassignmentOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--verify")
    shouldFailWith("Missing required argument \"[reassignment-json-file]\"", args)
  }

  @Test
  def shouldNotAllowBrokersListWithVerifyOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--verify",
      "--broker-list", "100,101",
      "--reassignment-json-file", "myfile.json")
    shouldFailWith("Option \"[broker-list]\" can't be used with action \"[verify]\"", args)
  }

  @Test
  def shouldNotAllowThrottleWithVerifyOption(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--verify",
      "--throttle", "100",
      "--reassignment-json-file", "myfile.json")
    shouldFailWith("Option \"[throttle]\" can't be used with action \"[verify]\"", args)
  }

  @Test
  def shouldNotAllowTopicsOptionWithVerify(): Unit = {
    val args = Array(
      "--bootstrap-server", "localhost:1234",
      "--verify",
      "--reassignment-json-file", "myfile.json",
      "--topics-to-move-json-file", "myfile.json")
    shouldFailWith("Option \"[topics-to-move-json-file]\" can't be used with action \"[verify]\"", args)
  }

  def shouldFailWith(msg: String, args: Array[String]): Unit = {
    val e = assertThrows(classOf[Exception], () => ReassignPartitionsCommand.validateAndParseArgs(args),
      () => s"Should have failed with [$msg] but no failure occurred.")
    assertTrue(e.getMessage.startsWith(msg), s"Expected exception with message:\n[$msg]\nbut was\n[${e.getMessage}]")
  }

  ///// Test --cancel
  @Test
  def shouldNotAllowCancelWithoutBootstrapServerOption(): Unit = {
    val args = Array(
      "--cancel")
    shouldFailWith("Please specify --bootstrap-server", args)
  }

  @Test
  def shouldNotAllowCancelWithoutReassignmentJsonFile(): Unit = {
    val args = Array(
      "--cancel",
      "--bootstrap-server", "localhost:1234",
      "--preserve-throttles")
    shouldFailWith("Missing required argument \"[reassignment-json-file]\"", args)
  }

  ///// Test --list
  @Test
  def shouldNotAllowZooKeeperWithListOption(): Unit = {
    val args = Array(
      "--list",
      "--zookeeper", "localhost:1234")
    shouldFailWith("Option \"[zookeeper]\" can't be used with action \"[list]\"", args)
  }
}
Chasego/kafka
core/src/test/scala/unit/kafka/admin/ReassignPartitionsCommandArgsTest.scala
Scala
apache-2.0
9,998
/* ************************************************************************************* * Copyright 2011 Normation SAS ************************************************************************************* * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU Affero GPL v3, the copyright holders add the following * Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3 * licence, when you create a Related Module, this Related Module is * not considered as a part of the work and may be distributed under the * license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>. * ************************************************************************************* */ package com.normation.rudder.web.services import com.normation.rudder.domain.policies.DirectiveId import com.normation.rudder.domain.policies.RuleVal import com.normation.cfclerk.domain.Variable import com.normation.cfclerk.domain.VariableSpec import com.normation.rudder.web.model.{ DirectiveEditor } import com.normation.cfclerk.services.TechniqueRepository import net.liftweb.common._ import Box._ import com.normation.cfclerk.domain.{ TechniqueId, Technique } import org.joda.time.{ LocalDate, LocalTime, Duration, DateTime } import com.normation.cfclerk.domain.PredefinedValuesVariableSpec trait DirectiveEditorService { /** * Retrieve a policyEditor given the Directive name, * if such Directive is known in the system */ def get( techniqueId: TechniqueId, directiveId: DirectiveId, //withExecutionPlanning:Option[TemporalVariableVal] = None, withVars: Map[String, Seq[String]] = Map()): Box[DirectiveEditor] } class DirectiveEditorServiceImpl( techniqueRepository: TechniqueRepository, section2FieldService: Section2FieldService) extends DirectiveEditorService { /** * Retrieve vars for the given Directive. * First, we try to retrieve default vars * from the techniqueRepository. * Then, we look in the parameter vars to * search for vars with the same name. * For each found, we change the default * var value by the parameter one. 
*/ import scala.util.control.Breaks._ /* * We exactly set the variable values to varValues, * so a missing variable key actually set the value * to Seq() */ private def getVars(allVars: Seq[VariableSpec], vars: Map[String, Seq[String]]): Seq[Variable] = { allVars.map { varSpec => varSpec match { case spec : PredefinedValuesVariableSpec => // variables values are already builtin spec.toVariable() case _ => // variable can be modified varSpec.toVariable(vars.getOrElse(varSpec.name, Seq())) } } } override def get( techniqueId: TechniqueId, directiveId: DirectiveId, withVarValues: Map[String, Seq[String]] = Map()): Box[DirectiveEditor] = { for { //start by checking Directive existence pol <- techniqueRepository.get(techniqueId) ?~! s"Error when looking for technique with ID '${techniqueId}'. Check technique name and version" allVars = pol.rootSection.getAllVariables vars = getVars(allVars, withVarValues) pe <- section2FieldService.initDirectiveEditor(pol, directiveId, vars) } yield pe } }
Kegeruneku/rudder
rudder-web/src/main/scala/com/normation/rudder/web/services/DirectiveEditorService.scala
Scala
agpl-3.0
4,218
package dpla.ingestion3.mappers.providers import dpla.ingestion3.enrichments.normalizations.filters.{DigitalSurrogateBlockList, ExtentIdentificationList} import dpla.ingestion3.mappers.utils._ import dpla.ingestion3.messages.IngestMessageTemplates import dpla.ingestion3.model.DplaMapData._ import dpla.ingestion3.model.{EdmAgent, _} import dpla.ingestion3.utils.Utils import org.json4s.JValue import org.json4s.JsonDSL._ import dpla.ingestion3.enrichments.normalizations.StringNormalizationUtils._ import scala.xml.{Elem, Node, NodeSeq} class HarvardMapping extends XmlMapping with XmlExtractor with IngestMessageTemplates { // SourceResource fields override def alternateTitle(data: Document[NodeSeq]): ZeroToMany[String] = for { titleInfoNode <- data \ "metadata" \ "mods" \ "titleInfo" if titleInfoNode \@ "type" == "alternative" titleText <- processTitleInfo(titleInfoNode) } yield titleText override def collection(data: Document[NodeSeq]): ZeroToMany[DcmiTypeCollection] = extractStrings(data \ "metadata" \ "mods" \ "extension" \ "sets" \ "set" \ "setName") .map(nameOnlyCollection) //name type="corporate" override def contributor(data: Document[NodeSeq]): ZeroToMany[EdmAgent] = processNames(data).contributors override def creator(data: Document[NodeSeq]): ZeroToMany[EdmAgent] = processNames(data).creators override def date(data: Document[NodeSeq]): ZeroToMany[EdmTimeSpan] = { // date issued val dateIssued = (data \ "metadata" \ "mods" \ "originInfo" \ "dateIssued") .flatMap(extractStrings(_)) .map(stringOnlyTimeSpan) // Get primary display date val keyDates = ((data \ "metadata" \ "mods" \ "originInfo" \ "dateCreated") ++ (data \ "metadata" \ "mods" \ "originInfo" \ "dateOther")) .flatMap(node => getByAttribute(node, "keyDate", "yes")) .filterNot(node => filterAttribute(node, "encoding", "marc")) .flatMap(extractStrings(_)) .map(stringOnlyTimeSpan) // approximate dates val approxDates = ((data \ "metadata" \ "mods" \ "originInfo" \ "dateCreated") ++ (data \ "metadata" \ "mods" \ "originInfo" \ "dateIssued")) .flatMap(node => getByAttribute(node, "qualifier", "questionable")) .filterNot(node => filterAttribute(node, "encoding", "marc")) .flatMap(extractStrings(_)) .map(str => if (str.startsWith("ca. ")) { str } else s"ca. 
$str" ) .map(stringOnlyTimeSpan) // Constructed date range val beginDate = ((data \ "metadata" \ "mods" \ "originInfo" \ "dateCreated") ++ (data \ "metadata" \ "mods" \ "originInfo" \ "dateIssued")) .flatMap(node => getByAttribute(node, "point", "start")) .flatMap(extractStrings(_)) val endDate = ((data \ "metadata" \ "mods" \ "originInfo" \ "dateCreated") ++ (data \ "metadata" \ "mods" \ "originInfo" \ "dateIssued")) .flatMap(node => getByAttribute(node, "point", "end")) .flatMap(extractStrings(_)) val constructedDates = if (beginDate.length == endDate.length) { beginDate.zip(endDate).map { case (begin: String, end: String) => EdmTimeSpan( originalSourceDate = Some(""), // blank original source date begin = Some(begin), end = Some(end) ) } } else { Seq() } dateIssued ++ keyDates ++ approxDates ++ constructedDates } override def description(data: Document[NodeSeq]): ZeroToMany[String] = extractStrings(data \ "metadata" \ "mods" \ "abstract") ++ (data \ "metadata" \ "mods" \ "note") .filterNot(node => filterAttribute(node, "type", "funding")) .filterNot(node => filterAttribute(node, "type", "organization")) .filterNot(node => filterAttribute(node, "type", "reproduction")) .filterNot(node => filterAttribute(node, "type", "system details")) .filterNot(node => filterAttribute(node, "type", "statement of responsibility")) .filterNot(node => filterAttribute(node, "type", "venue")) .flatMap(extractStrings) override def extent(data: Document[NodeSeq]): ZeroToMany[String] = extractStrings(data \ "metadata" \ "mods" \ "physicalDescription" \ "extent") override def format(data: Document[NodeSeq]): ZeroToMany[String] = (extractStrings(data \ "metadata" \ "mods" \ "genre") ++ extractStrings(data \ "metadata" \ "mods" \\ "termMaterialsTech")) .map( _.applyBlockFilter( DigitalSurrogateBlockList.termList ++ ExtentIdentificationList.termList )) .flatMap(_.splitAtDelimiter(";")) .filter(_.nonEmpty) override def identifier(data: Document[NodeSeq]): ZeroToMany[String] = extractStrings(data \ "metadata" \ "mods" \ "recordInfo" \ "recordIdentifier") ++ extractStrings(data \ "metadata" \ "mods" \ "identifier") override def language(data: Document[NodeSeq]): ZeroToMany[SkosConcept] = for { language <- data \ "metadata" \ "mods" \ "language" terms = language \ "languageTerm" data = terms.map(term => term \@ "type" -> term.text).toMap } yield SkosConcept(providedLabel = (data.get("text"), data.get("code")) match { case (Some(text), _) => Some(text) case (None, Some(code)) => Some(code) case (_, _) => None }) override def place(data: Document[NodeSeq]): ZeroToMany[DplaPlace] = ( extractStrings(data \ "metadata" \ "mods" \ "subject" \ "geographic") ++ extractStrings(data \ "metadata" \ "mods" \ "subject" \ "hierarchicalGeographic") ).map(nameOnlyPlace) override def publisher(data: Document[NodeSeq]): ZeroToMany[EdmAgent] = extractStrings(data \ "metadata" \ "mods" \ "originInfo" \ "publisher") .map(nameOnlyAgent) override def relation(data: Document[NodeSeq]): ZeroToMany[LiteralOrUri] = for { relatedItem <- data \ "metadata" \ "mods" \ "relatedItem" if relatedItem \@ "type" == "series" relation <- relatedItem \ "titleInfo" title <- processTitleInfo(relation) } yield Left(title) override def rights(data: Document[NodeSeq]): AtLeastOne[String] = Seq("Held in the collections of Harvard University.") override def subject(data: Document[NodeSeq]): ZeroToMany[SkosConcept] = { val topicSubjects = for { subjectNode <- data \ "metadata" \ "mods" \ "subject" subject <- subjectNode \ "topic" subjectText = subject.text.trim if 
subjectText.nonEmpty } yield nameOnlyConcept(subjectText) val nameSubjects = for { subjectNode <- data \ "metadata" \ "mods" \ "subject" \ "name" subjectText = processNameParts(subjectNode) } yield nameOnlyConcept(subjectText) val titleSubjects = for { subjectNode <- data \ "metadata" \ "mods" \ "subject" \ "titleInfo" subjectText <- processTitleInfo(subjectNode) } yield nameOnlyConcept(subjectText) topicSubjects ++ nameSubjects ++ titleSubjects } override def temporal(data: Document[NodeSeq]): ZeroToMany[EdmTimeSpan] = extractStrings(data \ "metadata" \ "mods" \ "subject" \ "temporal"). map(stringOnlyTimeSpan) override def title(data: Document[NodeSeq]): AtLeastOne[String] = { val title = for { titleInfoNode <- data \ "metadata" \ "mods" \ "titleInfo" if titleInfoNode \@ "type" != "alternative" titleText <- processTitleInfo(titleInfoNode) } yield titleText val collectionTitle = (data \ "metadata" \\ "relatedItem") .filter(node => filterAttribute(node, "displayLabel", "collection")) .flatMap(node => extractStrings(node \ "titleInfo" \ "title")) title ++ collectionTitle } override def `type`(data: Document[NodeSeq]): ZeroToMany[String] = extractStrings(data \ "metadata" \ "mods" \ "typeOfResource") ++ extractStrings(data \ "metadata" \ "mods" \ "extension" \ "librarycloud" \\ "digitalFormat") // OreAggregation fields override def dataProvider(data: Document[NodeSeq]): ZeroToMany[EdmAgent] = { val lookup = Map( "crimes" -> "Harvard Law School Library, Harvard University", "eda" -> "Emily Dickinson Archive", "lap" -> "Widener Library, Harvard University", "maps" -> "Harvard Map Collection, Harvard University", "medmss" -> "Houghton Library, Harvard University", "rubbings" -> "Fine Arts Library, Special Collections, Harvard University", "scarlet" -> "Harvard Law School Library, Harvard University", "scores" -> "Eda Kuhn Loeb Music Library, Harvard University", "ward" -> "General Artemas Ward Museum, Harvard University" ) val setSpec = (for { setSpec <- data \ "metadata" \ "mods" \ "extension" \ "sets" \ "set" \ "setSpec" } yield setSpec.text.trim).headOption val setSpecAgent = lookup.get(setSpec.getOrElse("")).map(nameOnlyAgent) // <mods:location><physicalLocation displayLabel="Harvard repository"> val physicalLocationAgent = (for { node <- data \ "metadata" \ "mods" \ "location" \ "physicalLocation" if node \@ "displayLabel" == "Harvard repository" } yield nameOnlyAgent(node.text.trim)).headOption // <mods:relatedItem displayLabel="collection"><location><physicalLocation displayLabel="Harvard repository"> val hostPhysicalLocationAgent = (for { relatedItem <- data \ "metadata" \ "mods" \\ "relatedItem" if (relatedItem \@ "displayLabel") == "collection" node <- relatedItem \ "location" \ "physicalLocation" if node \@ "displayLabel" == "Harvard repository" } yield nameOnlyAgent(node.text.trim)).headOption setSpecAgent .orElse(physicalLocationAgent) .orElse(hostPhysicalLocationAgent) .orElse(Some(nameOnlyAgent("Harvard Library, Harvard University"))) .toSeq } override def originalRecord(data: Document[NodeSeq]): ExactlyOne[String] = Utils.formatXml(data) override def isShownAt(data: Document[NodeSeq]): ZeroToMany[EdmWebResource] = { val artMuseumLink = (data \ "metadata" \ "mods" \ "location" \ "url") .flatMap(node => getByAttribute(node, "displayLabel", "Harvard Art Museums")) .flatMap(node => getByAttribute(node, "access", "object in context")) .flatMap(extractString(_)) .map(stringOnlyWebResource) val collectionLinks = collection(data) .flatMap(_.title) .flatMap(collectionTitle => { (data \ 
"metadata" \ "mods" \ "location" \ "url") .flatMap(node => getByAttribute(node, "displayLabel", collectionTitle)) .flatMap(node => getByAttribute(node, "access", "object in context")) .flatMap(extractString(_)) .map(stringOnlyWebResource) }) val objectInContext = (data \ "metadata" \ "mods" \ "location" \ "url") .flatMap(node => getByAttribute(node, "displayLabel", "Harvard Digital Collections")) .flatMap(node => getByAttribute(node, "access", "object in context")) .flatMap(extractString(_)) .map(stringOnlyWebResource) artMuseumLink ++ collectionLinks ++ objectInContext } //<mods:location><mods:url access="preview"> override def preview(data: Document[NodeSeq]): ZeroToMany[EdmWebResource] = for { node <- data \ "metadata" \ "mods" \ "location" \ "url" if node \@ "access" == "preview" } yield uriOnlyWebResource(URI(node.text.trim)) override def provider(data: Document[NodeSeq]): ExactlyOne[EdmAgent] = EdmAgent( name = Some("Harvard Library"), uri = Some(URI("http://dp.la/api/contributor/harvard")) ) //utility override def useProviderName: Boolean = false override def getProviderName: Option[String] = Some("harvard") override def originalId(implicit data: Document[NodeSeq]): ZeroToOne[String] = extractString(data \\ "header" \ "identifier").map(_.trim) override def sidecar(data: Document[NodeSeq]): JValue = ("prehashId", buildProviderBaseId()(data)) ~ ("dplaId", mintDplaId(data)) override def dplaUri(data: Document[NodeSeq]): ZeroToOne[URI] = mintDplaItemUri(data) private def name(name: Node): ZeroToOne[String] = name match { case elem: Elem => val text = elem.text.trim if (text.isEmpty) None else Some(text) case _ => None } private def processTitleInfo(titleInfo: Node): ZeroToOne[String] = titleInfo match { case elem: Elem => val candidates = for { child <- elem.child if child.isInstanceOf[Elem] } yield child.text.trim if (candidates.isEmpty) None else Some(candidates.mkString(" ").trim) case _ => None } //Helper method to get a list of creators and contributors private val processNames: Document[NodeSeq] => Names = (data: Document[NodeSeq]) => { val names = (for (name <- data \ "metadata" \ "mods" \ "name") yield { val nameString: String = processNameParts(name) val roleTerms = (name \ "role" \ "roleTerm").map(_.text.toLowerCase).distinct Name(nameString, roleTerms) }).toSet val creatorTypes = names.filter(name => name.roleTerm.contains("creator")) val categories: (Seq[Name], Seq[Name]) = if (names.isEmpty) //if names is empty, there are no creators or contributors (Seq(), Seq()) else if (creatorTypes.nonEmpty) //if some of the names have the "creator" roleType, those are the creators //and the rest are the contributors (creatorTypes.toSeq, (names -- creatorTypes).toSeq) else if ( names.head.roleTerm.isEmpty || names.head.roleTerm.intersect(Seq("sitter", "subject", "donor", "owner")).isEmpty ) //otherwise, if the first name isn't a contributor role type, it's a creator, //and the rest are contributors (Seq(names.head), names.tail.toSeq) else (Seq(), names.toSeq) val creators = categories._1.map(x => nameOnlyAgent(x.name)) val contributors = categories._2.map(x => nameOnlyAgent(x.name)) Names(creators, contributors) } private def processNameParts(name: Node): String = { val nameParts = for { namePart <- name \ "namePart" typeAttr = (namePart \ "@type").map(_.text).headOption.getOrElse("") part = namePart.text } yield NamePart(part, typeAttr) if (nameParts.isEmpty) return "" val nameString = nameParts.tail.foldLeft(nameParts.head.part)( (a, b) => { a + (if (b.`type` == "date") ", " else " ") 
+ b.part } ) nameString } case class Name(name: String, roleTerm: Seq[String]) case class NamePart(part: String, `type`: String) case class Names(creators: Seq[EdmAgent], contributors: Seq[EdmAgent]) }
dpla/ingestion3
src/main/scala/dpla/ingestion3/mappers/providers/HarvardMapping.scala
Scala
mit
14,675
package actors import akka.ActorTimer import akka.actor.{ActorLogging, Actor} import akka.pattern.pipe import com.dvgodoy.spark.benford.image.SBA._ import com.dvgodoy.spark.benford.util.JobId import models.ImageCommons import models.ImageService._ import org.scalactic._ import play.api.libs.json.{JsValue, Json} import scala.concurrent.Future import actors.ActorBuffer.Finished class ImageActor extends Actor with ActorLogging with ActorTimer { import context.dispatcher private var data: SBAImageDataMsg = _ private var sba: SBADataMsg = _ implicit val jobId = JobId(self.path.name) override def receive = { case srvDirect(baos: java.io.ByteArrayOutputStream) => { val originalSender = sender data = ImageCommons.loadData(baos) val result: JsValue = data match { case Good(s) => { Json.obj("job" -> Json.toJson(self.path.name.slice(0,self.path.name.length - 7))) } case Bad(e) => Json.obj("error" -> Json.toJson(e.head)) } Future(result) map (Finished(srvDirect(baos), _)) pipeTo originalSender } case srvData(filePath: String) => { val originalSender = sender data = ImageCommons.loadData(filePath) val result: JsValue = data match { case Good(s) => { Json.obj("job" -> Json.toJson(self.path.name.slice(0,self.path.name.length - 7))) } case Bad(e) => Json.obj("error" -> Json.toJson(e.head)) } Future(result) map (Finished(srvData(filePath), _)) pipeTo originalSender } case srvCalc(windowSize: Int) => { val originalSender = sender sba = ImageCommons.calcSBA(data, windowSize) val result: JsValue = sba match { case Good(s) => Json.obj("calc" -> "ok") case Bad(e) => Json.obj("error" -> Json.toJson(e.head)) } Future(result) map (Finished(srvCalc(windowSize), _)) pipeTo originalSender } case srvImage() => { val originalSender = sender val result: JsValue = if (data.get.originalImage == null) { Json.obj("error" -> "Error: Cannot load original image.") } else { Json.obj("image" -> Json.toJson(data.get.originalImage)) } Future(result) map (Finished(srvImage(), _)) pipeTo originalSender } case srvSBAImage(threshold: Double, whiteBackground: Boolean) => { val originalSender = sender val content = ImageCommons.getImage(sba, threshold, whiteBackground) val result: JsValue = content match { case Good(image) => Json.obj("image" -> Json.toJson(image)) case Bad(e) => Json.obj("error" -> Json.toJson(e.head)) } Future(result) map (Finished(srvSBAImage(threshold, whiteBackground), _)) pipeTo originalSender } } }
dvgodoy/play-benford-analysis
app/actors/ImageActor.scala
Scala
apache-2.0
2,744
/* This file is part of Octetoscope. Copyright (C) 2013-2015 Octetoscope contributors (see /AUTHORS.txt) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package ru.corrigendum.octetoscope.presentation import org.scalatest.FunSuite import org.scalatest.MustMatchers._ import ru.corrigendum.octetoscope.abstractinfra.Blob import ru.corrigendum.octetoscope.core._ import ru.corrigendum.octetoscope.presentation.tools.DisplayTreeNodeData class PackageSuite extends FunSuite { import PackageSuite._ test("presentVersionInfo") { val hash = "1234" * 10 presentVersionInfo(VersionInfo("1.2", 0, hash, dirty = false)) mustBe "1.2-g1234123" presentVersionInfo(VersionInfo("1.2", 34, hash, dirty = false)) mustBe "1.2+34-g1234123" presentVersionInfo(VersionInfo("1.2", 0, hash, dirty = true)) mustBe "1.2-g1234123-dirty" presentVersionInfo(VersionInfo("1.2", 34, hash, dirty = true)) mustBe "1.2+34-g1234123-dirty" } test("presentPiece - atom - with value") { DisplayTreeNodeData.from(presentPieceIgnoringEvents(Atom(Bytes(5), new EagerContentsR((), "alpha")))) mustBe DisplayTreeNodeData("WHOLE: alpha", Nil) } test("presentPiece - atom - without value") { DisplayTreeNodeData.from(presentPieceIgnoringEvents(Atom(Bytes(2), EmptyContents))) mustBe DisplayTreeNodeData("WHOLE", Nil) } test("presentPiece - molecule") { val molecule = Molecule(Bytes(100), new EagerContentsR((), "beta"), Seq( SubPiece("one", Bytes(0), Atom(Bytes(10), new EagerContentsR((), "gamma"))), SubPiece("two", Bytes(50), Atom(Bytes(10), EmptyContents)))) DisplayTreeNodeData.from(presentPieceIgnoringEvents(molecule)) mustBe DisplayTreeNodeData("WHOLE: beta", Nil, Some(Seq( DisplayTreeNodeData("one: gamma", Nil), DisplayTreeNodeData("two", Nil) )) ) } test("presentPiece - with note") { for (severity <- NoteSeverity.values) { val piece = Atom(Bytes(2), EmptyContents, notes = Seq(Note(severity, "note"))) DisplayTreeNodeData.from(presentPieceIgnoringEvents(piece)) mustBe DisplayTreeNodeData("WHOLE", Seq((SeverityColors(severity), "note"))) } } test("presentPiece - multiple notes") { val actual = DisplayTreeNodeData.from(presentPieceIgnoringEvents(Atom(Bytes(2), EmptyContents, notes = Seq(Note(NoteSeverity.Info, "note 1"), Note(NoteSeverity.Error, "note 2"))))) val expected = DisplayTreeNodeData("WHOLE", Seq((SeverityColors(NoteSeverity.Info), "note 1"), (SeverityColors(NoteSeverity.Error), "note 2"))) actual mustBe expected } test("presentPiece - double click handler") { val piece = Molecule(Bytes(100), EmptyContents, Seq( SubPiece("alpha", Bytes(50), Molecule(Bytes(25), EmptyContents, Seq( SubPiece("beta", Bytes(5), Atom(Bytes(10), EmptyContents)) ))) )) var receivedOffset: InfoSize = null var receivedSize: InfoSize = null def handleDoubleClick(offset: InfoSize, size: InfoSize): Unit = { receivedOffset = offset receivedSize = size } val displayTreeNode = presentPiece(piece, handleDoubleClick) displayTreeNode.getChildren.get()(0).getChildren.get()(0).eventListener.doubleClicked() receivedOffset mustBe 
Bytes(55) receivedSize mustBe Bytes(10) } test("presentBlobAsHexadecimal - empty") { presentBlobAsHexadecimal(Blob.empty, 10) mustBe "" } test("presentBlobAsHexadecimal - multiple") { val blob = new ArrayBlob(Array[Byte](0x12, 0x23, 0x34, 0x4a, 0x5b, 0x6c)) presentBlobAsHexadecimal(blob, 2) mustBe "12 23\\n34 4a\\n5b 6c" } test("presentBlobAsHexadecimal - non-multiple") { val blob = new ArrayBlob(Array[Byte](0x12, 0x23, 0x34, 0x4a, 0x5b, 0x6c, 0)) presentBlobAsHexadecimal(blob, 3) mustBe "12 23 34\\n4a 5b 6c\\n00" } test("generateBlobOffsets") { generateBlobOffsets(0, 4) mustBe "" generateBlobOffsets(1, 4) mustBe "00" generateBlobOffsets(4, 4) mustBe "00" generateBlobOffsets(7, 4) mustBe "00\\n04" generateBlobOffsets(9, 4) mustBe "00\\n04\\n08" generateBlobOffsets(0x101, 0x40) mustBe "0000\\n0040\\n0080\\n00c0\\n0100" generateBlobOffsets(0x15007, 0x3001) mustBe "00000000\\n00003001\\n00006002\\n00009003\\n0000c004\\n0000f005\\n00012006" generateBlobOffsets(0x200000000L, 0x7fffffff) mustBe "0000000000000000\\n000000007fffffff\\n00000000fffffffe\\n000000017ffffffd\\n00000001fffffffc" } } object PackageSuite { def presentPieceIgnoringEvents(piece: PlainPiece) = presentPiece(piece, (offset: InfoSize, size: InfoSize) => ()) }
SpecLad/Octetoscope
presentation/src/test/scala/ru/corrigendum/octetoscope/presentation/PackageSuite.scala
Scala
gpl-3.0
5,160
package org.jetbrains.plugins.scala package lang package psi package impl package base package literals import java.lang import java.lang.{Double => JDouble} import com.intellij.lang.ASTNode import com.intellij.openapi.project.Project import com.intellij.psi.util.PsiLiteralUtil import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral import org.jetbrains.plugins.scala.lang.psi.api.base.literals.ScDoubleLiteral import org.jetbrains.plugins.scala.lang.psi.types.{ScType, api} final class ScDoubleLiteralImpl(node: ASTNode, override val toString: String) extends NumericLiteralImplBase(node, toString) with ScDoubleLiteral { override protected def wrappedValue(value: JDouble): ScLiteral.Value[lang.Double] = ScDoubleLiteralImpl.Value(value) override protected def parseNumber(text: String): JDouble = PsiLiteralUtil.parseDouble(text) override private[psi] def unwrappedValue(value: JDouble) = value.doubleValue } object ScDoubleLiteralImpl { final case class Value(override val value: JDouble) extends NumericLiteralImplBase.Value(value) { override def negate: NumericLiteralImplBase.Value[JDouble] = Value(-value) override def wideType(implicit project: Project): ScType = api.Double } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/base/literals/ScDoubleLiteralImpl.scala
Scala
apache-2.0
1,279
/* * This file is part of Kiama. * * Copyright (C) 2008-2015 Anthony M Sloane, Macquarie University. * * Kiama is free software: you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the * Free Software Foundation, either version 3 of the License, or (at your * option) any later version. * * Kiama is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for * more details. * * You should have received a copy of the GNU Lesser General Public License * along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see * <http://www.gnu.org/licenses/>. */ package org.kiama package attribution /** * Common support for attribution of syntax trees in a functional style. * Includes circular and constant attributes but needs to be augmented with * basic attributes and parameterised attributes. */ trait AttributionCommon { import scala.language.experimental.macros /** * A constant attribute of a node type `T` with value of type `U`. The * value is given by the computation `u` which is evaluated at most once. */ class ConstantAttribute[T,U] (name : String, u : => U) extends Attribute[T,U] (name) { /** * Lazily computed result of evaluating the attribute's computation. */ private lazy val result = u /** * Return the value of this attribute for node `t`, always returning * `u` but only evaluating it once. */ def apply (t : T) : U = result } /** * Define a constant attribute of `T` nodes of type `U` given by the value * `u`. `u` is evaluated at most once. */ def constant[T,U] (u : => U) : Attribute[T,U] = macro AttributionCommonMacros.constantMacro[T,U] /** * As for the other `constant` with the first argument specifying a name for * the constructed attribute. */ def constant[T,U] (name : String, u : => U) : Attribute[T,U] = new ConstantAttribute[T,U] (name, u) }
solomono/kiama
core/src/org/kiama/attribution/AttributionCommon.scala
Scala
gpl-3.0
2,219
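A small sketch of the behaviour documented for ConstantAttribute above: the by-name computation runs at most once, however many nodes the attribute is applied to. The Node type and the wrapper object are illustrative assumptions, and the sketch presumes the rest of the org.kiama.attribution package (notably the Attribute base class) is on the classpath:

object ConstantAttributeSketch extends org.kiama.attribution.AttributionCommon with App {
  case class Node(label: String)

  var evaluations = 0

  // The second argument is by-name and evaluated lazily, at most once.
  val answer = new ConstantAttribute[Node, Int]("answer", { evaluations += 1; 42 })

  println((answer(Node("a")), answer(Node("b")), evaluations)) // (42,42,1)
}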
/* * Copyright 2007-2015 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb package http import java.net.URI import scala.concurrent.duration._ import common._ import json._ import util.Props import util.Helpers.tryo import LiftRules._ /** * Rules for HTTPS usage by a Lift application. * * Currently corresponds directly to the [[http://tools.ietf.org/html/rfc6797 * HTTP `Strict-Transport-Security` header]]. */ final case class HttpsRules( /** * When set, the duration of the requirement that HTTPS be used for this * site. It's usually a good idea for this to be a high number. If unset, * HTTPS is not required when interacting with this site. */ requiredTime: Option[Duration] = None, /** * When set to true, the required time above includes subdomains. */ includeSubDomains: Boolean = false ) { lazy val headers: List[(String, String)] = { requiredTime.toList.map { duration => val age = s"max-age=${duration.toSeconds}" val header = if (includeSubDomains) { s"$age ; includeSubDomains" } else { age } ("Strict-Transport-Security" -> header) } } /** * Returns the headers implied by this set of HTTPS rules. If `enforce` is * false, returns nothing. */ def headers(enforce: Boolean): List[(String, String)] = { if (enforce) { headers } else { Nil } } } object HttpsRules { /** * Creates a restrictive set of HTTPS rules requiring HTTPS for 365 days and * including subdomains in the requirement. */ def secure = apply(Some(365.days), true) } /** * Base trait for content source restrictions. These are different ways of * restricting where contents of various types are allowed to come from, in * conjunction with `ContentSecurityPolicy` categories. */ sealed trait ContentSourceRestriction { /** * The `Content-Security-Policy` string that represents this restriction. */ def sourceRestrictionString: String } /** * Marker trait for restrictions that only apply to JavaScript. */ sealed trait JavaScriptSourceRestriction extends ContentSourceRestriction /** * Marker trait for restrictions that only apply to stylesheets. */ sealed trait StylesheetSourceRestriction extends ContentSourceRestriction sealed trait GeneralSourceRestriction extends JavaScriptSourceRestriction with StylesheetSourceRestriction object ContentSourceRestriction { /** * Indicates content from all sources is allowed. */ case object All extends GeneralSourceRestriction { val sourceRestrictionString = "*" } /** * Indicates content from the given host path is allowed. See the * `Content-Security-Policy` spec's [[https://www.w3.org/TR/CSP/#source-list-path-patching * matching rules for `host-source`]] for more about what this can look * like. * * Example: * {{{ * Host("https://base.*.example.com") * }}} */ case class Host(hostAndPath: String) extends GeneralSourceRestriction { val sourceRestrictionString = hostAndPath } /** * Indicates content from the given scheme is allowed. The scheme should not * include the trailing `:`. 
* * Example: * {{{ * Scheme("data") * }}} */ case class Scheme(scheme: String) extends GeneralSourceRestriction { val sourceRestrictionString = scheme + ":" } /** * Indicates content from no sources is allowed. */ case object None extends GeneralSourceRestriction { val sourceRestrictionString = "'none'" } /** * Indicates content from the same origin as the content is allowed. */ case object Self extends GeneralSourceRestriction { val sourceRestrictionString = "'self'" } /** * Indicates inline content on the page is allowed to be interpreted. It is * highly recommended that this not be used, as it exposes your application to * cross-site scripting and other vulnerabilities. * * If not specified for JavaScript, JavaScript `on*` event handler attributes, * `<script>` elements, and `javascript:` URIs will not be executed by a * browser that supports content security policies. * * If not specified for stylesheets, `<style>` elements and inline `style` * attributes will not be read by a browser that supports content security * policies. */ case object UnsafeInline extends JavaScriptSourceRestriction with StylesheetSourceRestriction { val sourceRestrictionString = "'unsafe-inline'" } /** * Indicates `eval` and related functionality can be used. Some of Lift's * functionality, including `idMemoize` and comet handling, relies on eval, * so not including this in your script sources will mean you won't be able to * use those. * * If not specified for JavaScript, invoking `eval`, the `Function` * constructor, or `setTimeout`/`setInterval` with a string parameter will * all throw security exceptions in a browser that supports content security * policies. */ case object UnsafeEval extends JavaScriptSourceRestriction { val sourceRestrictionString = "'unsafe-eval'" } } /** * Specifies a `[[https://developer.mozilla.org/en-US/docs/Web/Security/CSP Content-Security-Policy]] ` * for this site. This will be sent to the client in a `Content-Security-Policy` * header when responses are returned from Lift. * * In development mode, content security policy violations are only reported if * the browser supports them, not enforced. In all other modes, content security * policy violations are enforced if the browser supports them. * * Note that the `X-Webkit-CSP` header is NOT specified, due to * potentially-broken behavior in iOS 5 and 5.1. This means iOS 6/6.1 will not * receive a content security policy that it can * understand. See the [[http://caniuse.com/#feat=contentsecuritypolicy caniuse * page on content security policy]] for more. * * @param defaultSources A list of default source restrictions; if one of the * other sources parameters is empty, the default sources will apply * instead. * @param connectSources A list of source restrictions for `XmlHttpRequest` * (AJAX) connections. * @param fontSources A list of source restrictions for loading fonts (e.g., * from CSS `font-face` declarations). * @param frameSources A list of source restrictions for loading frames and * iframes. * @param imageSources A list of source restrictions for loading images. * @param mediaSources A list of source restrictions for loading media (audio * and video). * @param objectSources A list of source restrictions for loading `object`, * `embed`, `applet`, and related elements. * @param scriptSources A list of source restrictions for loading scripts. 
Also * accepts the `[[ContentSourceRestriction.UnsafeInline UnsafeInline]]` * and `[[ContentSourceRestriction.UnsafeEval UnsafeEval]]` source * restrictions, though these are strongly discouraged. * @param styleSources A list of source restrictions for loading styles. Also * accepts the `[[ContentSourceRestriction.UnsafeInline UnsafeInline]]` * source, though it is strongly discouraged. * @param reportUri The URI where any violation of the security policy will be * reported. You can set the function that handles these violations in * `[[LiftRules.contentSecurityPolicyViolationReport]]`. By default, * reported to `[[ContentSecurityPolicy.defaultReportUri]]`. * * If this is `None`, violations will not be reported. */ final case class ContentSecurityPolicy( defaultSources: List[ContentSourceRestriction] = List(ContentSourceRestriction.Self), connectSources: List[ContentSourceRestriction] = Nil, fontSources: List[ContentSourceRestriction] = Nil, frameSources: List[ContentSourceRestriction] = Nil, imageSources: List[ContentSourceRestriction] = List(ContentSourceRestriction.All), mediaSources: List[ContentSourceRestriction] = Nil, objectSources: List[ContentSourceRestriction] = Nil, scriptSources: List[JavaScriptSourceRestriction] = List( ContentSourceRestriction.UnsafeEval, ContentSourceRestriction.Self ), styleSources: List[StylesheetSourceRestriction] = Nil, reportUri: Option[URI] = Some(ContentSecurityPolicy.defaultReportUri) ) { /** * The string that describes this content security policy in the syntax * expected by the `Content-Security-Policy` header. */ def contentSecurityPolicyString = { val allRestrictions = List( "default-src" -> defaultSources, "connect-src" -> connectSources, "font-src" -> fontSources, "frame-src" -> frameSources, "img-src" -> imageSources, "media-src" -> mediaSources, "object-src" -> objectSources, "script-src" -> scriptSources, "style-src" -> styleSources ) val restrictionString = allRestrictions .collect { case (category, restrictions) if restrictions.nonEmpty => category + " " + restrictions.map(_.sourceRestrictionString).mkString(" ") } .mkString("; ") reportUri.map { uri => s"$restrictionString; report-uri $uri" } getOrElse { restrictionString } } private[this] lazy val reportOnlyHeaders = { List( "Content-Security-Policy-Report-Only" -> contentSecurityPolicyString, "X-Content-Security-Policy-Report-Only" -> contentSecurityPolicyString ) } private[this] lazy val enforcedHeaders = { List( "Content-Security-Policy" -> contentSecurityPolicyString, "X-Content-Security-Policy" -> contentSecurityPolicyString ) } /** * Returns the headers implied by this content security policy. */ def headers(enforce: Boolean = true, logViolations: Boolean = true): List[(String, String)] = { if (enforce) { enforcedHeaders } else if (logViolations) { reportOnlyHeaders } else { Nil } } } object ContentSecurityPolicy { /** * The default URI where security policy violations will be reported. This * URI is under Lift's URI namespace, at `[[LiftRules.liftPath]]`. */ def defaultReportUri = { new URI(LiftRules.liftPath + "/content-security-policy-report") } /** * Creates a restrictive content security policy that disallows images from * all sources except the page's origin. * * Note that the default content security policy restricts all other resources * to the same origin, but allows images from any source; the secure one only * differs because it adds restrictions to the image sources. 
*/ def secure: ContentSecurityPolicy = { ContentSecurityPolicy(imageSources = Nil) } } /** * The expected payload of a content security policy violation report. * * Parsable from the JSON POST that a browser should send when a violation * occurs. */ case class ContentSecurityPolicyViolation( documentUri: String, referrer: String, blockedUri: String, violatedDirective: String, originalPolicy: String ) object ContentSecurityPolicyViolation extends LazyLoggable { private[this] implicit val formats = DefaultFormats def defaultViolationHandler: DispatchPF = { case request @ Req(start :: "content-security-policy-report" :: Nil, _, _) if start == LiftRules.liftContextRelativePath => val violation = for { requestJson <- request.forcedBodyAsJson camelCasedJson = requestJson.transformField { case JField("document-uri", content) => JField("documentUri", content) case JField("blocked-uri", content) => JField("blockedUri", content) case JField("violated-directive", content) => JField("violatedDirective", content) case JField("original-policy", content) => JField("originalPolicy", content) } violationJson = camelCasedJson \\ "csp-report" extractedViolation <- tryo(violationJson.extract[ContentSecurityPolicyViolation]) } yield { extractedViolation } () => { violation match { case Full(violation) => LiftRules.contentSecurityPolicyViolationReport(violation) or Full(OkResponse()) case _ => logger.warn( s"Got a content security violation report we couldn't interpret: '${request.body.map(new String(_, "UTF-8"))}'." ) Full(BadRequestResponse("Unrecognized format for content security policy report.")) } } } } /** * Defines restrictions on allowing served pages to be embedded in frames. */ sealed trait FrameRestrictions { def headers: List[(String,String)] /** * Returns the headers implied by these frame restrictions. * * Because of how frame restrictions are handled, if enforcement is turned * off, no headers are generated. */ def headers(enforce: Boolean = false): List[(String,String)] = { if (enforce) { headers } else { Nil } } } object FrameRestrictions { /** * Allows other pages from the same origin as the one being served to embed * this page in a frame. */ case object SameOrigin extends FrameRestrictions { val headers = List("X-Frame-Options" -> "SAMEORIGIN") } /** * Does not allow embedding the page being served in a frame at all. */ case object Deny extends FrameRestrictions { val headers = List("X-Frame-Options" -> "DENY") } } /** * Specifies security rules for a Lift application. By default, HTTPS is not * required and `Content-Security-Policy` is restricted to the current domain * for everything except images, which are accepted from any domain. * Additionally, served pages can only be embedded in other frames from * the current domain. * * You can use `[[SecurityRules.secure]]` to enable more restrictive, but * also more secure, defaults. * * @param enforceInDevMode If true, security policies and HTTPS rules are * enforced in dev mode in addition to staging/pilot/production/etc. * @param logInDevMode If true, dev mode violations of security policies are * logged by default. Note that if you override * `[[LiftRules.contentSecurityPolicyViolationReport]]` or otherwise * change the default Lift policy violation handling behavior, it will * be up to you to handle this property as desired. 
*/ final case class SecurityRules( https: Option[HttpsRules] = None, content: Option[ContentSecurityPolicy] = Some(ContentSecurityPolicy()), frameRestrictions: Option[FrameRestrictions] = Some(FrameRestrictions.SameOrigin), enforceInOtherModes: Boolean = false, logInOtherModes: Boolean = true, enforceInDevMode: Boolean = false, logInDevMode: Boolean = true ) { private val enforce_? = { if (Props.devMode) { enforceInDevMode } else { enforceInOtherModes } } private val logViolations_? = { if (Props.devMode) { logInDevMode } else { logInOtherModes } } /** * Returns the headers implied by this set of security rules. */ lazy val headers: List[(String, String)] = { https.toList.flatMap(_.headers(enforce_?)) ::: content.toList.flatMap(_.headers(enforce_?, logViolations_?)) ::: frameRestrictions.toList.flatMap(_.headers(enforce_?)) } } object SecurityRules { /** * Creates a restrictive set of security rules, including required HTTPS, * [[HttpsRules$.secure secure HTTPS rules]], and * [[ContentSecurityPolicy$.secure secure `Content-Security-Policy` rules]]. * * To tweak any of these settings, use the `SecurityRules` constructor * directly. */ def secure = { apply( Some(HttpsRules.secure), Some(ContentSecurityPolicy.secure), enforceInOtherModes = true ) } }
lift/framework
web/webkit/src/main/scala/net/liftweb/http/SecurityRules.scala
Scala
apache-2.0
16,478
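A hedged usage sketch for the types above: build a SecurityRules value with explicit HTTPS and Content-Security-Policy settings and inspect the response headers it implies. The chosen sources and durations are illustrative, and wiring the rules into a running Lift application (typically from Boot via LiftRules) is assumed rather than shown:

object SecurityRulesSketch extends App {
  import net.liftweb.http._
  import scala.concurrent.duration._

  val rules = SecurityRules(
    https = Some(HttpsRules(requiredTime = Some(365.days), includeSubDomains = true)),
    content = Some(ContentSecurityPolicy(
      scriptSources = List(ContentSourceRestriction.Self),
      styleSources = List(ContentSourceRestriction.Self, ContentSourceRestriction.UnsafeInline)
    )),
    enforceInOtherModes = true
  )

  // Strict-Transport-Security, Content-Security-Policy (or the report-only variant)
  // and X-Frame-Options headers implied by the rules above.
  rules.headers.foreach { case (name, value) => println(s"$name: $value") }
}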
package io.buoyant.marathon.v2 import com.twitter.finagle._ import com.twitter.finagle.util.DefaultTimer import com.twitter.finagle.tracing.Trace import com.twitter.util.{NonFatal => _, _} import scala.util.control.NonFatal object AppIdNamer { object Closed extends Throwable } class AppIdNamer( api: Api, prefix: Path, ttl: Duration, timer: Timer = DefaultTimer.twitter ) extends Namer { import AppIdNamer._ private[this] implicit val _timer = timer /** * Accepts names in the form: * /<app-id>/residual/path * * or * * /<group-id>/<app-id>/residual/path * * etc * * and attempts to bind an Addr by resolving named endpoint from the * Marathon master. */ def lookup(path: Path): Activity[NameTree[Name]] = if (path.isEmpty) Activity.value(NameTree.Neg) else { // each time the map of all Apps updates, find the // shortest-matching part of `path` that exists as an App ID. val possibleIds = (1 to path.size).map(path.take(_)) appsActivity.map { apps => Trace.recordBinary("marathon.path", path.show) val found = possibleIds.collectFirst { case app if apps(app) => Trace.recordBinary("marathon.appId", app.show) val residual = path.drop(app.size) val id = prefix ++ app val addr = getAndMonitorAddr(app) Name.Bound(addr, id, residual) } Trace.recordBinary("marathon.found", found.isDefined) found match { case Some(name) => NameTree.Leaf(name) case None => NameTree.Neg } } } private[this] val appsActivity: Activity[Api.AppIds] = Activity(Var.async[Activity.State[Api.AppIds]](Activity.Pending) { state => @volatile var initialized, stopped = false @volatile var pending: Future[_] = Future.never def loop(): Unit = if (!stopped) { pending = api.getAppIds().respond { case Return(apps) => initialized = true state() = Activity.Ok(apps) Trace.recordBinary("marathon.apps", apps.map(_.show).mkString(",")) if (!stopped) { pending = Future.sleep(ttl).onSuccess(_ => loop()) } case Throw(NonFatal(e)) => if (!initialized) { state() = Activity.Failed(e) } if (!stopped) { pending = Future.sleep(ttl).onSuccess(_ => loop()) } case Throw(e) => state() = Activity.Failed(e) } } loop() Closable.make { deadline => stopped = true pending.raise(Closed) Future.Unit } }) private[this] var appMonitors: Map[Path, Var[Addr]] = Map.empty private[this] def getAndMonitorAddr(app: Path): Var[Addr] = synchronized { appMonitors.get(app) match { case Some(addr) => addr case None => val addr = Var.async[Addr](Addr.Pending) { addr => @volatile var initialized, stopped = false @volatile var pending: Future[_] = Future.never def loop(): Unit = if (!stopped) { pending = api.getAddrs(app).respond { case Return(addrs) => initialized = true addr() = Addr.Bound(addrs) if (!stopped) { pending = Future.sleep(ttl).onSuccess(_ => loop()) } case Throw(NonFatal(e)) => if (!initialized) { addr() = Addr.Failed(e) } if (!stopped) { pending = Future.sleep(ttl).onSuccess(_ => loop()) } case Throw(e) => addr() = Addr.Failed(e) } } loop() Closable.make { deadline => stopped = true synchronized { appMonitors -= app } pending.raise(Closed) Future.Unit } } appMonitors += (app -> addr) addr } } }
hhtpcd/linkerd
marathon/src/main/scala/io/buoyant/marathon/v2/AppIdNamer.scala
Scala
apache-2.0
4,079
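The lookup above binds the shortest prefix of the requested path that Marathon reports as an existing app ID and keeps the remainder as the residual. Below is a self-contained sketch of that prefix search, using plain Seq[String] in place of com.twitter.finagle.Path and a made-up set of app IDs:

object PrefixMatchSketch extends App {
  val knownApps: Set[Seq[String]] = Set(Seq("dept", "billing"), Seq("web"))
  val path = Seq("dept", "billing", "api", "v1")

  // Same idea as AppIdNamer.lookup: try prefixes from shortest to longest and
  // bind on the first one that exists, keeping the rest as the residual path.
  val possibleIds = (1 to path.size).map(path.take(_))
  val found = possibleIds.collectFirst {
    case app if knownApps(app) => (app, path.drop(app.size))
  }

  println(found) // Some((List(dept, billing),List(api, v1)))
}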
//https://www.hackerrank.com/challenges/mirko-at-construction-site object Control { import scala.language.reflectiveCalls def using[A <: { def close(): Unit }, B](param: A)(f: A => B): B = try { f(param) } finally { param.close() } } object Perf { import java.util.concurrent.TimeUnit def time[R](block: => R): R = { val s = System.nanoTime() val r = block // call-by-name println("Elapsed time: " + TimeUnit.SECONDS.convert(System.nanoTime() - s, TimeUnit.NANOSECONDS) + " seconds") r } } object MirkoConstructionSite extends App { import Control._ def readLines(filename:String):Option[Iterator[String]] = { try { val lines = using(io.Source.fromFile(filename)) { source => (for (line <- source.getLines) yield line).toList.iterator } Some(lines) } catch { case e: Exception => None } } def run(lines: Iterator[String]):Unit = { val Array(n, q) = lines.next.split(' ').toArray.map(_.toInt) // n = number of buildings, q = number of queries //println(s"n:${n} q:${q}") val base = lines.next.split(' ').toList.map(_.toInt) // base height val step = lines.next.split(' ').toList.map(_.toInt) // step height //println(s"bases:${base.length} steps:${step.length}") // (bs: building indexes, q: steps) = List((height,building index)) ordered from tallest to shortest def build(bs:List[Int], q: Int) = bs. map(b => (base(b) + step(b) * q, b)). sorted(Ordering[(Int,Int)].reverse). unzip._2. zipWithIndex // buildings (indexes) in the game var buildings = (0 until n).toList // create a list of tuples (query, position) val queries = new Array[Int](q) (0 until q).foreach(queries(_) = lines.next.toInt) val sortedQueries = queries.zipWithIndex.sorted // List[(query, original position)] //println(s"queries:${queries.length}") import scala.collection.mutable.{Map => MutableMap} var answers = MutableMap[Int, Int]() // Map(query -> building index) // this is to get rid of buildings that get a worse position from iteration to iteration var prev = Map[Int, Int]() import Perf._ time { for (query <- sortedQueries) { print(buildings.length + " ") if (buildings.length == 1) answers += (query._1 -> (buildings(0) + 1)) else { val actual = build(buildings, query._1) // building -> position answers += query._1 -> (actual.head._1 + 1) if (!prev.isEmpty) buildings = for (x <- actual if (prev(x._1) >= x._2)) yield x._1 prev = actual.toMap } } } //for (query <- queries) println(answers(query)) } if (args.length == 1) { readLines(args(0)) match { case Some(lines) => run(lines) case None => println("No lines") } } else println("No input") }
flopezlasanta/hackerrank
src/functional_programming/functional_structures/MirkoConstructionSite.scala
Scala
mit
2,805
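A tiny worked example of the ranking step used above: after q steps each building's height is base + step * q, the (height, index) pairs are sorted in reverse so taller buildings come first (ties resolved toward the higher index because the whole tuple ordering is reversed), and zipWithIndex records each building's position. The base and step values are made up:

object RankSketch extends App {
  val base = List(5, 1, 4) // buildings 0, 1, 2
  val step = List(1, 3, 2)

  def build(bs: List[Int], q: Int) =
    bs.map(b => (base(b) + step(b) * q, b))
      .sorted(Ordering[(Int, Int)].reverse)
      .unzip._2
      .zipWithIndex

  println(build(List(0, 1, 2), 0)) // List((0,0), (2,1), (1,2))
  println(build(List(0, 1, 2), 3)) // List((2,0), (1,1), (0,2))
}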
/* * Copyright 2014–2017 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.niflheim import quasar.precog.common._ import quasar.precog.common.ingest.EventId import quasar.precog.common.security.Authorities import quasar.precog.util._ import akka.actor.{Actor, ActorRef, ActorSystem, Props} import akka.pattern.{AskSupport, GracefulStopSupport} import akka.util.Timeout import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ import org.slf4s.Logging import quasar.blueeyes.json._ import quasar.blueeyes.json.serialization._ import quasar.blueeyes.json.serialization.DefaultSerialization._ import quasar.blueeyes.json.serialization.IsoSerialization._ import quasar.blueeyes.json.serialization.Extractor._ import scalaz._ import scalaz.effect.IO import scalaz.Validation._ import scalaz.syntax.monad._ import java.io.File import java.util.concurrent.ScheduledExecutorService import java.util.concurrent.atomic._ import scala.collection.immutable.SortedMap import shapeless._ case class Insert(batch: Seq[NIHDB.Batch], responseRequested: Boolean) case object GetSnapshot case class Block(id: Long, segments: Seq[Segment], stable: Boolean) case object GetStatus case class Status(cooked: Int, pending: Int, rawSize: Int) case object GetStructure case class Structure(columns: Set[(CPath, CType)]) sealed trait InsertResult case class Inserted(offset: Long, size: Int) extends InsertResult case object Skipped extends InsertResult case object Cook case object Quiesce object NIHDB { import scala.concurrent.ExecutionContext.Implicits.global // TODO!!!! case class Batch(offset: Long, values: Seq[JValue]) final val projectionIdGen = new AtomicInteger() final def create(chef: ActorRef, authorities: Authorities, baseDir: File, cookThreshold: Int, timeout: FiniteDuration, txLogScheduler: ScheduledExecutorService)(implicit actorSystem: ActorSystem): IO[Validation[Error, NIHDB]] = NIHDBActor.create(chef, authorities, baseDir, cookThreshold, timeout, txLogScheduler) map { _ map { actor => new NIHDBImpl(actor, timeout, authorities) } } final def open(chef: ActorRef, baseDir: File, cookThreshold: Int, timeout: FiniteDuration, txLogScheduler: ScheduledExecutorService)(implicit actorSystem: ActorSystem) = NIHDBActor.open(chef, baseDir, cookThreshold, timeout, txLogScheduler) map { _ map { _ map { case (authorities, actor) => new NIHDBImpl(actor, timeout, authorities) } } } final def hasProjection(dir: File) = NIHDBActor.hasProjection(dir) } trait NIHDB { def authorities: Authorities def insert(batch: Seq[NIHDB.Batch]): IO[Unit] def insertVerified(batch: Seq[NIHDB.Batch]): Future[InsertResult] def getSnapshot(): Future[NIHDBSnapshot] def getBlockAfter(id: Option[Long], cols: Option[Set[ColumnRef]]): Future[Option[Block]] def getBlock(id: Option[Long], cols: Option[Set[CPath]]): Future[Option[Block]] def length: Future[Long] def projectionId: Int def status: Future[Status] def structure: Future[Set[ColumnRef]] /** * Returns the total number of defined objects for a given `CPath` *mask*. 
* Since this punches holes in our rows, it is not simply the length of the * block. Instead we count the number of rows that have at least one defined * value at each path (and their children). */ def count(paths0: Option[Set[CPath]]): Future[Long] /** * Forces the chef to cook the current outstanding commit log. This should only * be called in the event that an ingestion is believed to be 100% complete, since * it will result in a "partial" block (i.e. a block that is not of maximal length). * Note that the append log is visible to snapshots, meaning that this function * should be unnecessary in nearly all circumstances. */ def cook: Future[Unit] def quiesce: Future[Unit] def close(implicit actorSystem: ActorSystem): Future[Unit] } private[niflheim] class NIHDBImpl private[niflheim] (actor: ActorRef, timeout: Timeout, val authorities: Authorities)(implicit executor: ExecutionContext) extends NIHDB with GracefulStopSupport with AskSupport { private implicit val impFiniteDuration = timeout val projectionId = NIHDB.projectionIdGen.getAndIncrement def insert(batch: Seq[NIHDB.Batch]): IO[Unit] = IO(actor ! Insert(batch, false)) def insertVerified(batch: Seq[NIHDB.Batch]): Future[InsertResult] = (actor ? Insert(batch, true)).mapTo[InsertResult] def getSnapshot(): Future[NIHDBSnapshot] = (actor ? GetSnapshot).mapTo[NIHDBSnapshot] def getBlockAfter(id: Option[Long], cols: Option[Set[ColumnRef]]): Future[Option[Block]] = getSnapshot().map(_.getBlockAfter(id, cols)) def getBlock(id: Option[Long], cols: Option[Set[CPath]]): Future[Option[Block]] = getSnapshot().map(_.getBlock(id, cols)) def length: Future[Long] = getSnapshot().map(_.count()) def status: Future[Status] = (actor ? GetStatus).mapTo[Status] def structure: Future[Set[ColumnRef]] = getSnapshot().map(_.structure) def count(paths0: Option[Set[CPath]]): Future[Long] = getSnapshot().map(_.count(paths0)) def cook: Future[Unit] = (actor ? Cook).mapTo[Unit] def quiesce: Future[Unit] = (actor ? 
Quiesce).mapTo[Unit] def close(implicit actorSystem: ActorSystem): Future[Unit] = gracefulStop(actor, timeout.duration).map(_ => ()) } private[niflheim] object NIHDBActor extends Logging { final val descriptorFilename = "NIHDBDescriptor.json" final val cookedSubdir = "cooked_blocks" final val rawSubdir = "raw_blocks" final val lockName = "NIHDBProjection" private[niflheim] final val internalDirs = Set(cookedSubdir, rawSubdir, descriptorFilename, CookStateLog.logName + "_1.log", CookStateLog.logName + "_2.log", lockName + ".lock", CookStateLog.lockName + ".lock") final def create(chef: ActorRef, authorities: Authorities, baseDir: File, cookThreshold: Int, timeout: FiniteDuration, txLogScheduler: ScheduledExecutorService)(implicit actorSystem: ActorSystem): IO[Validation[Error, ActorRef]] = { val descriptorFile = new File(baseDir, descriptorFilename) val currentState: IO[Validation[Error, ProjectionState]] = if (descriptorFile.exists) { ProjectionState.fromFile(descriptorFile) } else { val state = ProjectionState.empty(authorities) for { _ <- IO { log.info("No current descriptor found for " + baseDir + "; " + authorities + ", creating fresh descriptor") } _ <- ProjectionState.toFile(state, descriptorFile) } yield { success(state) } } currentState map { _ map { s => actorSystem.actorOf(Props(new NIHDBActor(s, baseDir, chef, cookThreshold, txLogScheduler))) } } } final def readDescriptor(baseDir: File): IO[Option[Validation[Error, ProjectionState]]] = { val descriptorFile = new File(baseDir, descriptorFilename) if (descriptorFile.exists) { ProjectionState.fromFile(descriptorFile) map { Some(_) } } else { log.warn("No projection found at " + baseDir) IO { None } } } final def open(chef: ActorRef, baseDir: File, cookThreshold: Int, timeout: FiniteDuration, txLogScheduler: ScheduledExecutorService)(implicit actorSystem: ActorSystem): IO[Option[Validation[Error, (Authorities, ActorRef)]]] = { val currentState: IO[Option[Validation[Error, ProjectionState]]] = readDescriptor(baseDir) currentState map { _ map { _ map { s => (s.authorities, actorSystem.actorOf(Props(new NIHDBActor(s, baseDir, chef, cookThreshold, txLogScheduler)))) } } } } final def hasProjection(dir: File) = (new File(dir, descriptorFilename)).exists private case class BlockState(cooked: List[CookedReader], pending: Map[Long, StorageReader], rawLog: RawHandler) private class State(val txLog: CookStateLog, var blockState: BlockState, var currentBlocks: SortedMap[Long, StorageReader]) } private[niflheim] class NIHDBActor private (private var currentState: ProjectionState, baseDir: File, chef: ActorRef, cookThreshold: Int, txLogScheduler: ScheduledExecutorService) extends Actor with Logging { import NIHDBActor._ assert(cookThreshold > 0) assert(cookThreshold < (1 << 16)) private[this] val workLock = FileLock(baseDir, lockName) private[this] val cookedDir = new File(baseDir, cookedSubdir) private[this] val rawDir = new File(baseDir, rawSubdir) private[this] val descriptorFile = new File(baseDir, descriptorFilename) private[this] val cookSequence = new AtomicLong private[this] var actorState: Option[State] = None private def state = { import scalaz.syntax.effect.id._ actorState getOrElse open.flatMap(_.tap(s => IO(actorState = Some(s)))).unsafePerformIO } private def initDirs(f: File) = IO { if (!f.isDirectory) { if (!f.mkdirs) { throw new Exception("Failed to create dir: " + f) } } } private def initActorState = IO { log.debug("Opening log in " + baseDir) val txLog = new CookStateLog(baseDir, txLogScheduler) log.debug("Current raw block 
id = " + txLog.currentBlockId) // We'll need to update our current thresholds based on what we read out of any raw logs we open var maxOffset = currentState.maxOffset val currentRawFile = rawFileFor(txLog.currentBlockId) val (currentLog, rawLogOffsets) = if (currentRawFile.exists) { val (handler, offsets, ok) = RawHandler.load(txLog.currentBlockId, currentRawFile) if (!ok) { log.warn("Corruption detected and recovery performed on " + currentRawFile) } (handler, offsets) } else { (RawHandler.empty(txLog.currentBlockId, currentRawFile), Seq.empty[Long]) } rawLogOffsets.sortBy(- _).headOption.foreach { newMaxOffset => maxOffset = maxOffset max newMaxOffset } val pendingCooks = txLog.pendingCookIds.map { id => val (reader, offsets, ok) = RawHandler.load(id, rawFileFor(id)) if (!ok) { log.warn("Corruption detected and recovery performed on " + currentRawFile) } maxOffset = math.max(maxOffset, offsets.max) (id, reader) }.toMap this.currentState = currentState.copy(maxOffset = maxOffset) // Restore the cooked map val cooked = currentState.readers(cookedDir) val blockState = BlockState(cooked, pendingCooks, currentLog) val currentBlocks = computeBlockMap(blockState) log.debug("Initial block state = " + blockState) // Re-fire any restored pending cooks blockState.pending.foreach { case (id, reader) => log.debug("Restarting pending cook on block %s:%d".format(baseDir, id)) chef ! Prepare(id, cookSequence.getAndIncrement, cookedDir, reader, () => ()) } new State(txLog, blockState, currentBlocks) } private def open = actorState.map(IO(_)) getOrElse { for { _ <- initDirs(cookedDir) _ <- initDirs(rawDir) state <- initActorState } yield state } private def cook(responseRequested: Boolean) = IO { state.blockState.rawLog.close val toCook = state.blockState.rawLog val newRaw = RawHandler.empty(toCook.id + 1, rawFileFor(toCook.id + 1)) state.blockState = state.blockState.copy(pending = state.blockState.pending + (toCook.id -> toCook), rawLog = newRaw) state.txLog.startCook(toCook.id) val target = sender val onComplete = if (responseRequested) () => target ! (()) else () => () chef ! Prepare(toCook.id, cookSequence.getAndIncrement, cookedDir, toCook, onComplete) } private def quiesce = IO { actorState foreach { s => log.debug("Releasing resources for projection in " + baseDir) s.blockState.rawLog.close s.txLog.close ProjectionState.toFile(currentState, descriptorFile) actorState = None } } private def close = { IO(log.debug("Closing projection in " + baseDir)) >> quiesce } except { case t: Throwable => IO { log.error("Error during close", t) } } ensuring { IO { workLock.release } } override def postStop() = { close.unsafePerformIO } def getSnapshot(): NIHDBSnapshot = NIHDBSnapshot(state.currentBlocks) private def rawFileFor(seq: Long) = new File(rawDir, "%06x.raw".format(seq)) private def computeBlockMap(current: BlockState) = { val allBlocks: List[StorageReader] = (current.cooked ++ current.pending.values :+ current.rawLog) SortedMap(allBlocks.map { r => r.id -> r }.toSeq: _*) } def updatedThresholds(current: Map[Int, Int], ids: Seq[Long]): Map[Int, Int] = { (current.toSeq ++ ids.map { i => val EventId(p, s) = EventId.fromLong(i); (p -> s) }).groupBy(_._1).map { case (p, ids) => (p -> ids.map(_._2).max) } } override def receive = { case GetSnapshot => sender ! 
getSnapshot() case Spoilt(_, _, onComplete) => onComplete() case Cooked(id, _, _, file, onComplete) => // This could be a replacement for an existing id, so we // ned to remove/close any existing cooked block with the same // ID //TODO: LENSES!!!!!!!~ state.blockState = state.blockState.copy( cooked = CookedReader.load(cookedDir, file) :: state.blockState.cooked.filterNot(_.id == id), pending = state.blockState.pending - id ) state.currentBlocks = computeBlockMap(state.blockState) currentState = currentState.copy( cookedMap = currentState.cookedMap + (id -> file.getPath) ) log.debug("Cook complete on %d".format(id)) ProjectionState.toFile(currentState, descriptorFile).unsafePerformIO state.txLog.completeCook(id) onComplete() case Insert(batch, responseRequested) => if (batch.isEmpty) { log.warn("Skipping insert with an empty batch on %s".format(baseDir.getCanonicalPath)) if (responseRequested) sender ! Skipped } else { val (skipValues, keepValues) = batch.partition(_.offset <= currentState.maxOffset) if (keepValues.isEmpty) { log.warn("Skipping entirely seen batch of %d rows prior to offset %d".format(batch.flatMap(_.values).size, currentState.maxOffset)) if (responseRequested) sender ! Skipped } else { val values = keepValues.flatMap(_.values) val offset = keepValues.map(_.offset).max log.debug("Inserting %d rows, skipping %d rows at offset %d for %s".format(values.length, skipValues.length, offset, baseDir.getCanonicalPath)) state.blockState.rawLog.write(offset, values) // Update the producer thresholds for the rows. We know that ids only has one element due to the initial check currentState = currentState.copy(maxOffset = offset) if (state.blockState.rawLog.length >= cookThreshold) { log.debug("Starting cook on %s after threshold exceeded".format(baseDir.getCanonicalPath)) cook(false).unsafePerformIO } log.debug("Insert complete on %d rows at offset %d for %s".format(values.length, offset, baseDir.getCanonicalPath)) if (responseRequested) sender ! Inserted(offset, values.length) } } case Cook => cook(true).unsafePerformIO case GetStatus => sender ! Status(state.blockState.cooked.length, state.blockState.pending.size, state.blockState.rawLog.length) case Quiesce => quiesce.unsafePerformIO sender ! (()) } } private[niflheim] case class ProjectionState(maxOffset: Long, cookedMap: Map[Long, String], authorities: Authorities) { def readers(baseDir: File): List[CookedReader] = cookedMap.map { case (id, metadataFile) => CookedReader.load(baseDir, new File(metadataFile)) }.toList } private[niflheim] object ProjectionState { import Extractor.Error def empty(authorities: Authorities) = ProjectionState(-1L, Map.empty, authorities) // FIXME: Add version for this format val v1Schema = "maxOffset" :: "cookedMap" :: "authorities" :: HNil implicit val stateDecomposer = decomposer[ProjectionState](v1Schema) implicit val stateExtractor = extractor[ProjectionState](v1Schema) def fromFile(input: File): IO[Validation[Error, ProjectionState]] = IO { JParser.parseFromFile(input).bimap(Extractor.Thrown(_): Extractor.Error, x => x).flatMap { jv => jv.validated[ProjectionState] } } def toFile(state: ProjectionState, output: File): IO[Boolean] = { IOUtils.safeWriteToFile(state.serialize.renderCompact, output) } }
drostron/quasar
niflheim/src/main/scala/quasar/niflheim/NIHDBActor.scala
Scala
apache-2.0
17,002
package org.jetbrains.plugins.dotty.lang.parser.parsing.statements import org.jetbrains.plugins.dotty.lang.parser.parsing.types.Type /** * @author adkozlov */ object VarDef extends org.jetbrains.plugins.scala.lang.parser.parsing.statements.VarDef { override protected val patDef = PatDef override protected val `type` = Type }
whorbowicz/intellij-scala
src/org/jetbrains/plugins/dotty/lang/parser/parsing/statements/VarDef.scala
Scala
apache-2.0
337
/* * Happy Melly Teller * Copyright (C) 2013 - 2016, Happy Melly http://www.happymelly.com * * This file is part of the Happy Melly Teller. * * Happy Melly Teller is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Happy Melly Teller is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Happy Melly Teller. If not, see <http://www.gnu.org/licenses/>. * * If you have questions concerning this license or the applicable additional * terms, you may contact by email Sergey Kotlov, [email protected] or * in writing Happy Melly One, Handelsplein 37, Rotterdam, The Netherlands, 3071 PR */ package controllers import play.api.http.Writeable import play.api.i18n.I18nSupport import play.api.libs.json.{JsValue, Json} import play.api.mvc._ import play.twirl.api.Html import scala.concurrent.Future /** * Provides a set of functions for handling JSON */ trait AsyncController extends Controller with I18nSupport { protected def jsonBadRequest(msg: String) = Future.successful(BadRequest(Json.obj("message" -> msg))) protected def jsonConflict(msg: String) = Future.successful(Conflict(Json.obj("message" -> msg))) protected def jsonForbidden(msg: String) = Future.successful(Forbidden(Json.obj("message" -> msg))) protected def jsonFormError(errors: JsValue) = Future.successful(BadRequest(Json.obj("data" -> errors))) protected def jsonInternalError(msg: String) = Future.successful(InternalServerError(Json.obj("message" -> msg))) protected def jsonNotFound(msg: String) = Future.successful(NotFound(Json.obj("message" -> msg))) protected def jsonOk(data: JsValue) = Future.successful(Ok(Json.prettyPrint(data))) protected def jsonRequest(status: Int, msg: String) = status match { case NOT_FOUND ⇒ jsonNotFound(msg) case CONFLICT ⇒ jsonConflict(msg) case _ ⇒ jsonBadRequest(msg) } protected def jsonSuccess(msg: String, data: Option[JsValue] = None, body: Option[Html] = None) = { val reply = Json.obj( "message" -> msg, "data" -> data, "body" -> body.getOrElse(Html("")).toString() ) jsonOk(reply) } protected def jsonUnauthorized = Future.successful(Unauthorized("Unauthorized")) protected def badRequest[C](content: C)(implicit writeable: Writeable[C]) = Future.successful(BadRequest(content)) protected def forbidden[C](content: C)(implicit writeable: Writeable[C]) = Future.successful(Forbidden(content)) protected def notFound[C](content: C)(implicit writeable: Writeable[C]) = Future.successful(NotFound(content)) protected def ok[C](content: C)(implicit writeable: Writeable[C]) = Future.successful(Ok(content)) protected def redirect(url: String, flashing: (String, String)*) = if (flashing.isEmpty) Future.successful(Redirect(url)) else Future.successful(Redirect(url).flashing(flashing:_*)) protected def redirect(call: Call, flashing: (String, String)*) = if (flashing.isEmpty) Future.successful(Redirect(call)) else Future.successful(Redirect(call).flashing(flashing:_*)) protected def redirect(url: String, session: Session, flashing: (String, String)*) = if (flashing.isEmpty) Future.successful(Redirect(url).withSession(session)) else 
Future.successful(Redirect(url).flashing(flashing:_*).withSession(session)) }
HappyMelly/teller
app/controllers/AsyncController.scala
Scala
gpl-3.0
3,742
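A hedged sketch of how a controller might lean on these helpers (Play 2.4/2.5-era API, matching the imports above); PersonController, findName and the injected MessagesApi are illustrative assumptions, not part of the project:

package controllers

import javax.inject.Inject
import play.api.i18n.MessagesApi
import play.api.libs.json.Json
import play.api.mvc.Action
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PersonController @Inject() (val messagesApi: MessagesApi) extends AsyncController {

  def show(id: Long) = Action.async { implicit request =>
    findName(id).flatMap {
      case Some(name) => jsonSuccess("found", data = Some(Json.obj("name" -> name)))
      case None       => jsonNotFound(s"No person with id $id")
    }
  }

  // Stand-in for a real lookup service.
  private def findName(id: Long): Future[Option[String]] = Future.successful(None)
}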
package sk.scalagine.math /** * Created with IntelliJ IDEA. * User: zladovan * Date: 25.5.2014 * Time: 12:04 */ class InverseMatrixNotExistsException(matrix: MatrixNxN[_]) extends RuntimeException("Non invertible matrix: " + matrix)
zladovan/scalagine
engine/math/src/main/scala/sk/scalagine/math/InverseMatrixNotExistsException.scala
Scala
mit
241
package com.idyria.osi.vui.implementation.swing import java.awt.Menu import java.awt.MenuItem import java.awt.event.ActionListener import java.awt.event.ActionEvent import javax.swing.MenuElement import javax.swing.JMenuItem import javax.swing.JMenu import javax.swing.JPopupMenu trait SwingUtilsTrait { def addActionMenu(start: MenuElement)(path: String)(cl: => Unit) = { createPathMenu(start)(path) match { case Some(menuItem) => menuItem.addActionListener(new ActionListener { def actionPerformed(ev: ActionEvent) = { cl } }) case None => sys.error("Cannot find/create menu for path: " + path) } } def createPathMenu(start: MenuElement)(path: String) = { // Split path and take existing menus or create new one var currentMenu = start var resultItem: Option[JMenuItem] = None var splittedPaths = path.split("->").filterNot { _.length() == 0 }.map(_.trim) splittedPaths.zipWithIndex.foreach { case (menuText, i) => currentMenu.getSubElements.collect { case i: JMenuItem => i }.find(_.getText == menuText) match { //-- Found existing case Some(foundMenu: JMenu) => currentMenu = foundMenu resultItem = Some(foundMenu) //-- Create new one as Menu or Menu Item case None if (i == splittedPaths.size - 1) => var item = new JMenuItem(menuText) currentMenu match { case m: JPopupMenu => m.add(item) case other: JMenuItem => other.add(item) } resultItem = Some(item) case None => var item = new JMenu(menuText) currentMenu match { case m: JPopupMenu => m.add(item) case other: JMenuItem => other.add(item) } currentMenu = item // Don' keep going and stop case Some(_) => throw new RuntimeException("stop") } } resultItem } }
richnou/vui2
vui2-javafx/src/main/scala/com/idyria/osi/vui/implementation/swing/SwingUtilsTrait.scala
Scala
agpl-3.0
2,020
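A hedged usage sketch for addActionMenu: starting from a JPopupMenu, the helper creates the intermediate "File" and "Export" menus on demand and attaches the action to the final leaf item. The labels and the popup are illustrative; note that createPathMenu only knows how to add children to JPopupMenu and JMenuItem containers, so the start element should be one of those (or already contain the first menu in the path):

import com.idyria.osi.vui.implementation.swing.SwingUtilsTrait
import javax.swing.JPopupMenu

object MenuSketch extends SwingUtilsTrait with App {
  val popup = new JPopupMenu

  // Builds File -> Export -> "As PNG..." under the popup and wires the action.
  addActionMenu(popup)("File -> Export -> As PNG...") {
    println("export requested")
  }

  println(popup.getSubElements.length) // 1: the generated "File" menu
}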
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst

/**
 * Identifies a `table` in `database`. If `database` is not defined, the current database is used.
 */
private[sql] case class TableIdentifier(table: String, database: Option[String] = None) {
  def withDatabase(database: String): TableIdentifier = this.copy(database = Some(database))

  def toSeq: Seq[String] = database.toSeq :+ table

  override def toString: String = quotedString

  def quotedString: String = toSeq.map("`" + _ + "`").mkString(".")

  def unquotedString: String = toSeq.mkString(".")
}
tophua/spark1.52
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/TableIdentifier.scala
Scala
apache-2.0
1,443
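A quick illustration of the quoting helpers above. Because the case class is private[sql], the sketch lives in the org.apache.spark.sql package; the object name is an assumption:

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.TableIdentifier

object TableIdentifierSketch extends App {
  val qualified = TableIdentifier("events", Some("analytics"))

  println(qualified.quotedString)   // `analytics`.`events`
  println(qualified.unquotedString) // analytics.events

  // withDatabase fills in the database on an unqualified identifier.
  println(TableIdentifier("events").withDatabase("analytics") == qualified) // true
}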
package rml.args.conversions.basic import rml.args.arg._ import rml.args.arg.input._ import rml.args.exceptions.IllegalArgException import rml.args.arg.restriction.NotRestricted import rml.args.arg.input.SingleArg import rml.args.arg.input.PositionalArg import rml.args.arg.input.ListArg0 import rml.args.arg.input.ListArg import rml.args.arg.input.JoinArg trait ToFloat extends NotRestricted { val baseType: String = "Float" def mapToType(value: String): Float = try { value.toFloat } catch { case nfe: NumberFormatException => throw new IllegalArgException("Value '" + value + "' is not a valid Float") } } object AFloat { def apply(key: String) = InputArg(key, new SingleArg[Float] with ToFloat) } object JFloat { def apply(key: String) = InputArg(key, new JoinArg[Float] with ToFloat { override val sep = ""} ) } object Floats { def apply(key: String) = InputArg(key, new ListArg[Float] with ToFloat) } object Floats0{ def apply(key: String) = InputArg(key, new ListArg0[Float] with ToFloat) } object PFloat { def apply(pos: Int) = InputArg("-", new ToFloat with PositionalArg[Float]{ val position = pos }) }
rml/scala_args
src/main/scala/rml/args/conversions/basic/ToFloat.scala
Scala
gpl-3.0
1,156
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.math.BigDecimal import org.apache.spark.sql.api.java._ import org.apache.spark.sql.catalyst.plans.logical.Project import org.apache.spark.sql.execution.QueryExecution import org.apache.spark.sql.execution.columnar.InMemoryRelation import org.apache.spark.sql.execution.command.{CreateDataSourceTableAsSelectCommand, ExplainCommand} import org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand import org.apache.spark.sql.functions.{lit, udf} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSQLContext import org.apache.spark.sql.test.SQLTestData._ import org.apache.spark.sql.types._ import org.apache.spark.sql.util.QueryExecutionListener private case class FunctionResult(f1: String, f2: String) class UDFSuite extends QueryTest with SharedSQLContext { import testImplicits._ test("built-in fixed arity expressions") { val df = spark.emptyDataFrame df.selectExpr("rand()", "randn()", "rand(5)", "randn(50)") } test("built-in vararg expressions") { val df = Seq((1, 2)).toDF("a", "b") df.selectExpr("array(a, b)") df.selectExpr("struct(a, b)") } test("built-in expressions with multiple constructors") { val df = Seq(("abcd", 2)).toDF("a", "b") df.selectExpr("substr(a, 2)", "substr(a, 2, 3)").collect() } test("count") { val df = Seq(("abcd", 2)).toDF("a", "b") df.selectExpr("count(a)") } test("count distinct") { val df = Seq(("abcd", 2)).toDF("a", "b") df.selectExpr("count(distinct a)") } test("SPARK-8003 spark_partition_id") { val df = Seq((1, "Tearing down the walls that divide us")).toDF("id", "saying") df.createOrReplaceTempView("tmp_table") checkAnswer(sql("select spark_partition_id() from tmp_table").toDF(), Row(0)) spark.catalog.dropTempView("tmp_table") } test("SPARK-8005 input_file_name") { withTempPath { dir => val data = sparkContext.parallelize(0 to 10, 2).toDF("id") data.write.parquet(dir.getCanonicalPath) spark.read.parquet(dir.getCanonicalPath).createOrReplaceTempView("test_table") val answer = sql("select input_file_name() from test_table").head().getString(0) assert(answer.contains(dir.toURI.getPath)) assert(sql("select input_file_name() from test_table").distinct().collect().length >= 2) spark.catalog.dropTempView("test_table") } } test("error reporting for incorrect number of arguments - builtin function") { val df = spark.emptyDataFrame val e = intercept[AnalysisException] { df.selectExpr("substr('abcd', 2, 3, 4)") } assert(e.getMessage.contains("Invalid number of arguments for function substr. 
Expected:")) } test("error reporting for incorrect number of arguments - udf") { val df = spark.emptyDataFrame val e = intercept[AnalysisException] { spark.udf.register("foo", (_: String).length) df.selectExpr("foo(2, 3, 4)") } assert(e.getMessage.contains("Invalid number of arguments for function foo. Expected:")) } test("error reporting for undefined functions") { val df = spark.emptyDataFrame val e = intercept[AnalysisException] { df.selectExpr("a_function_that_does_not_exist()") } assert(e.getMessage.contains("Undefined function")) assert(e.getMessage.contains("a_function_that_does_not_exist")) } test("Simple UDF") { spark.udf.register("strLenScala", (_: String).length) assert(sql("SELECT strLenScala('test')").head().getInt(0) === 4) } test("UDF defined using UserDefinedFunction") { import functions.udf val foo = udf((x: Int) => x + 1) spark.udf.register("foo", foo) assert(sql("select foo(5)").head().getInt(0) == 6) } test("ZeroArgument non-deterministic UDF") { val foo = udf(() => Math.random()) spark.udf.register("random0", foo.asNondeterministic()) val df = sql("SELECT random0()") assert(df.logicalPlan.asInstanceOf[Project].projectList.forall(!_.deterministic)) assert(df.head().getDouble(0) >= 0.0) val foo1 = foo.asNondeterministic() val df1 = testData.select(foo1()) assert(df1.logicalPlan.asInstanceOf[Project].projectList.forall(!_.deterministic)) assert(df1.head().getDouble(0) >= 0.0) val bar = udf(() => Math.random(), DataTypes.DoubleType).asNondeterministic() val df2 = testData.select(bar()) assert(df2.logicalPlan.asInstanceOf[Project].projectList.forall(!_.deterministic)) assert(df2.head().getDouble(0) >= 0.0) val javaUdf = udf(new UDF0[Double] { override def call(): Double = Math.random() }, DoubleType).asNondeterministic() val df3 = testData.select(javaUdf()) assert(df3.logicalPlan.asInstanceOf[Project].projectList.forall(!_.deterministic)) assert(df3.head().getDouble(0) >= 0.0) } test("TwoArgument UDF") { spark.udf.register("strLenScala", (_: String).length + (_: Int)) assert(sql("SELECT strLenScala('test', 1)").head().getInt(0) === 5) } test("UDF in a WHERE") { withTempView("integerData") { spark.udf.register("oneArgFilter", (n: Int) => { n > 80 }) val df = sparkContext.parallelize( (1 to 100).map(i => TestData(i, i.toString))).toDF() df.createOrReplaceTempView("integerData") val result = sql("SELECT * FROM integerData WHERE oneArgFilter(key)") assert(result.count() === 20) } } test("UDF in a HAVING") { withTempView("groupData") { spark.udf.register("havingFilter", (n: Long) => { n > 5 }) val df = Seq(("red", 1), ("red", 2), ("blue", 10), ("green", 100), ("green", 200)).toDF("g", "v") df.createOrReplaceTempView("groupData") val result = sql( """ | SELECT g, SUM(v) as s | FROM groupData | GROUP BY g | HAVING havingFilter(s) """.stripMargin) assert(result.count() === 2) } } test("UDF in a GROUP BY") { withTempView("groupData") { spark.udf.register("groupFunction", (n: Int) => { n > 10 }) val df = Seq(("red", 1), ("red", 2), ("blue", 10), ("green", 100), ("green", 200)).toDF("g", "v") df.createOrReplaceTempView("groupData") val result = sql( """ | SELECT SUM(v) | FROM groupData | GROUP BY groupFunction(v) """.stripMargin) assert(result.count() === 2) } } test("UDFs everywhere") { withTempView("groupData") { spark.udf.register("groupFunction", (n: Int) => { n > 10 }) spark.udf.register("havingFilter", (n: Long) => { n > 2000 }) spark.udf.register("whereFilter", (n: Int) => { n < 150 }) spark.udf.register("timesHundred", (n: Long) => { n * 100 }) val df = Seq(("red", 1), ("red", 
2), ("blue", 10), ("green", 100), ("green", 200)).toDF("g", "v") df.createOrReplaceTempView("groupData") val result = sql( """ | SELECT timesHundred(SUM(v)) as v100 | FROM groupData | WHERE whereFilter(v) | GROUP BY groupFunction(v) | HAVING havingFilter(v100) """.stripMargin) assert(result.count() === 1) } } test("struct UDF") { spark.udf.register("returnStruct", (f1: String, f2: String) => FunctionResult(f1, f2)) val result = sql("SELECT returnStruct('test', 'test2') as ret") .select($"ret.f1").head().getString(0) assert(result === "test") } test("udf that is transformed") { spark.udf.register("makeStruct", (x: Int, y: Int) => (x, y)) // 1 + 1 is constant folded causing a transformation. assert(sql("SELECT makeStruct(1 + 1, 2)").first().getAs[Row](0) === Row(2, 2)) } test("type coercion for udf inputs") { spark.udf.register("intExpected", (x: Int) => x) // pass a decimal to intExpected. assert(sql("SELECT intExpected(1.0)").head().getInt(0) === 1) } test("udf in different types") { spark.udf.register("testDataFunc", (n: Int, s: String) => { (n, s) }) spark.udf.register("decimalDataFunc", (a: java.math.BigDecimal, b: java.math.BigDecimal) => { (a, b) }) spark.udf.register("binaryDataFunc", (a: Array[Byte], b: Int) => { (a, b) }) spark.udf.register("arrayDataFunc", (data: Seq[Int], nestedData: Seq[Seq[Int]]) => { (data, nestedData) }) spark.udf.register("mapDataFunc", (data: scala.collection.Map[Int, String]) => { data }) spark.udf.register("complexDataFunc", (m: Map[String, Int], a: Seq[Int], b: Boolean) => { (m, a, b) } ) checkAnswer( sql("SELECT tmp.t.* FROM (SELECT testDataFunc(key, value) AS t from testData) tmp").toDF(), testData) checkAnswer( sql(""" | SELECT tmp.t.* FROM | (SELECT decimalDataFunc(a, b) AS t FROM decimalData) tmp """.stripMargin).toDF(), decimalData) checkAnswer( sql(""" | SELECT tmp.t.* FROM | (SELECT binaryDataFunc(a, b) AS t FROM binaryData) tmp """.stripMargin).toDF(), binaryData) checkAnswer( sql(""" | SELECT tmp.t.* FROM | (SELECT arrayDataFunc(data, nestedData) AS t FROM arrayData) tmp """.stripMargin).toDF(), arrayData.toDF()) checkAnswer( sql(""" | SELECT mapDataFunc(data) AS t FROM mapData """.stripMargin).toDF(), mapData.toDF()) checkAnswer( sql(""" | SELECT tmp.t.* FROM | (SELECT complexDataFunc(m, a, b) AS t FROM complexData) tmp """.stripMargin).toDF(), complexData.select("m", "a", "b")) } test("SPARK-11716 UDFRegistration does not include the input data type in returned UDF") { val myUDF = spark.udf.register("testDataFunc", (n: Int, s: String) => { (n, s.toInt) }) // Without the fix, this will fail because we fail to cast data type of b to string // because myUDF does not know its input data type. With the fix, this query should not // fail. 
checkAnswer( testData2.select(myUDF($"a", $"b").as("t")), testData2.selectExpr("struct(a, b)")) checkAnswer( sql("SELECT tmp.t.* FROM (SELECT testDataFunc(a, b) AS t from testData2) tmp").toDF(), testData2) } test("SPARK-19338 Provide identical names for UDFs in the EXPLAIN output") { def explainStr(df: DataFrame): String = { val explain = ExplainCommand(df.queryExecution.logical, extended = false) val sparkPlan = spark.sessionState.executePlan(explain).executedPlan sparkPlan.executeCollect().map(_.getString(0).trim).headOption.getOrElse("") } val udf1Name = "myUdf1" val udf2Name = "myUdf2" val udf1 = spark.udf.register(udf1Name, (n: Int) => n + 1) val udf2 = spark.udf.register(udf2Name, (n: Int) => n * 1) assert(explainStr(sql("SELECT myUdf1(myUdf2(1))")).contains(s"UDF:$udf1Name(UDF:$udf2Name(1))")) assert(explainStr(spark.range(1).select(udf1(udf2(functions.lit(1))))) .contains(s"UDF:$udf1Name(UDF:$udf2Name(1))")) } test("SPARK-23666 Do not display exprId in argument names") { withTempView("x") { Seq(((1, 2), 3)).toDF("a", "b").createOrReplaceTempView("x") spark.udf.register("f", (a: Int) => a) val outputStream = new java.io.ByteArrayOutputStream() Console.withOut(outputStream) { spark.sql("SELECT f(a._1) FROM x").show } assert(outputStream.toString.contains("UDF:f(a._1 AS `_1`)")) } } test("cached Data should be used in the write path") { withTable("t") { withTempPath { path => var numTotalCachedHit = 0 val listener = new QueryExecutionListener { override def onFailure(f: String, qe: QueryExecution, e: Exception): Unit = {} override def onSuccess(funcName: String, qe: QueryExecution, duration: Long): Unit = { qe.withCachedData match { case c: CreateDataSourceTableAsSelectCommand if c.query.isInstanceOf[InMemoryRelation] => numTotalCachedHit += 1 case i: InsertIntoHadoopFsRelationCommand if i.query.isInstanceOf[InMemoryRelation] => numTotalCachedHit += 1 case _ => } } } spark.listenerManager.register(listener) val udf1 = udf({ (x: Int, y: Int) => x + y }) val df = spark.range(0, 3).toDF("a") .withColumn("b", udf1($"a", lit(10))) df.cache() df.write.saveAsTable("t") sparkContext.listenerBus.waitUntilEmpty(1000) assert(numTotalCachedHit == 1, "expected to be cached in saveAsTable") df.write.insertInto("t") sparkContext.listenerBus.waitUntilEmpty(1000) assert(numTotalCachedHit == 2, "expected to be cached in insertInto") df.write.save(path.getCanonicalPath) sparkContext.listenerBus.waitUntilEmpty(1000) assert(numTotalCachedHit == 3, "expected to be cached in save for native") } } } test("SPARK-24891 Fix HandleNullInputsForUDF rule") { val udf1 = udf({(x: Int, y: Int) => x + y}) val df = spark.range(0, 3).toDF("a") .withColumn("b", udf1($"a", udf1($"a", lit(10)))) .withColumn("c", udf1($"a", lit(null))) val plan = spark.sessionState.executePlan(df.logicalPlan).analyzed comparePlans(df.logicalPlan, plan) checkAnswer( df, Seq( Row(0, 10, null), Row(1, 12, null), Row(2, 14, null))) } test("SPARK-24891 Fix HandleNullInputsForUDF rule - with table") { withTable("x") { Seq((1, "2"), (2, "4")).toDF("a", "b").write.format("json").saveAsTable("x") sql("insert into table x values(3, null)") sql("insert into table x values(null, '4')") spark.udf.register("f", (a: Int, b: String) => a + b) val df = spark.sql("SELECT f(a, b) FROM x") val plan = spark.sessionState.executePlan(df.logicalPlan).analyzed comparePlans(df.logicalPlan, plan) checkAnswer(df, Seq(Row("12"), Row("24"), Row("3null"), Row(null))) } } test("SPARK-25044 Verify null input handling for primitive types - with udf()") { val input = Seq( 
(null, Integer.valueOf(1), "x"), ("M", null, "y"), ("N", Integer.valueOf(3), null)).toDF("a", "b", "c") val udf1 = udf((a: String, b: Int, c: Any) => a + b + c) val df = input.select(udf1('a, 'b, 'c)) checkAnswer(df, Seq(Row("null1x"), Row(null), Row("N3null"))) // test Java UDF. Java UDF can't have primitive inputs, as it's generic typed. val udf2 = udf(new UDF3[String, Integer, Object, String] { override def call(t1: String, t2: Integer, t3: Object): String = { t1 + t2 + t3 } }, StringType) val df2 = input.select(udf2('a, 'b, 'c)) checkAnswer(df2, Seq(Row("null1x"), Row("Mnully"), Row("N3null"))) } test("SPARK-25044 Verify null input handling for primitive types - with udf.register") { withTable("t") { Seq((null, Integer.valueOf(1), "x"), ("M", null, "y"), ("N", Integer.valueOf(3), null)) .toDF("a", "b", "c").write.format("json").saveAsTable("t") spark.udf.register("f", (a: String, b: Int, c: Any) => a + b + c) val df = spark.sql("SELECT f(a, b, c) FROM t") checkAnswer(df, Seq(Row("null1x"), Row(null), Row("N3null"))) // test Java UDF. Java UDF can't have primitive inputs, as it's generic typed. spark.udf.register("f2", new UDF3[String, Integer, Object, String] { override def call(t1: String, t2: Integer, t3: Object): String = { t1 + t2 + t3 } }, StringType) val df2 = spark.sql("SELECT f2(a, b, c) FROM t") checkAnswer(df2, Seq(Row("null1x"), Row("Mnully"), Row("N3null"))) } } test("SPARK-25044 Verify null input handling for primitive types - with udf(Any, DataType)") { val f = udf((x: Int) => x, IntegerType) checkAnswer( Seq(new Integer(1), null).toDF("x").select(f($"x")), Row(1) :: Row(0) :: Nil) val f2 = udf((x: Double) => x, DoubleType) checkAnswer( Seq(new java.lang.Double(1.1), null).toDF("x").select(f2($"x")), Row(1.1) :: Row(0.0) :: Nil) } test("SPARK-26308: udf with decimal") { val df1 = spark.createDataFrame( sparkContext.parallelize(Seq(Row(new BigDecimal("2011000000000002456556")))), StructType(Seq(StructField("col1", DecimalType(30, 0))))) val udf1 = org.apache.spark.sql.functions.udf((value: BigDecimal) => { if (value == null) null else value.toBigInteger.toString }) checkAnswer(df1.select(udf1(df1.col("col1"))), Seq(Row("2011000000000002456556"))) } test("SPARK-26308: udf with complex types of decimal") { val df1 = spark.createDataFrame( sparkContext.parallelize(Seq(Row(Array(new BigDecimal("2011000000000002456556"))))), StructType(Seq(StructField("col1", ArrayType(DecimalType(30, 0)))))) val udf1 = org.apache.spark.sql.functions.udf((arr: Seq[BigDecimal]) => { arr.map(value => if (value == null) null else value.toBigInteger.toString) }) checkAnswer(df1.select(udf1($"col1")), Seq(Row(Array("2011000000000002456556")))) val df2 = spark.createDataFrame( sparkContext.parallelize(Seq(Row(Map("a" -> new BigDecimal("2011000000000002456556"))))), StructType(Seq(StructField("col1", MapType(StringType, DecimalType(30, 0)))))) val udf2 = org.apache.spark.sql.functions.udf((map: Map[String, BigDecimal]) => { map.mapValues(value => if (value == null) null else value.toBigInteger.toString) }) checkAnswer(df2.select(udf2($"col1")), Seq(Row(Map("a" -> "2011000000000002456556")))) } test("SPARK-26323 Verify input type check - with udf()") { val f = udf((x: Long, y: Any) => x) val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j").select(f($"i", $"j")) checkAnswer(df, Seq(Row(1L), Row(2L))) } test("SPARK-26323 Verify input type check - with udf.register") { withTable("t") { Seq(1 -> "a", 2 -> "b").toDF("i", "j").write.format("json").saveAsTable("t") spark.udf.register("f", (x: Long, y: Any) => x) 
val df = spark.sql("SELECT f(i, j) FROM t") checkAnswer(df, Seq(Row(1L), Row(2L))) } } test("Using java.time.Instant in UDF") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { val expected = java.time.Instant.parse("2019-02-27T00:00:00Z") val plusSec = udf((i: java.time.Instant) => i.plusSeconds(1)) val df = spark.sql("SELECT TIMESTAMP '2019-02-26 23:59:59Z' as t") .select(plusSec('t)) assert(df.collect().toSeq === Seq(Row(expected))) } } test("Using java.time.LocalDate in UDF") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { val expected = java.time.LocalDate.parse("2019-02-27") val plusDay = udf((i: java.time.LocalDate) => i.plusDays(1)) val df = spark.sql("SELECT DATE '2019-02-26' as d") .select(plusDay('d)) assert(df.collect().toSeq === Seq(Row(expected))) } } }
aosagie/spark
sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
Scala
apache-2.0
19,832
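UDFSuite above covers many registration paths; a minimal sketch of the two basic styles it exercises, assuming an already-built SparkSession named `spark`:

import org.apache.spark.sql.functions.udf
import spark.implicits._

val plusOne = udf((x: Int) => x + 1)              // DataFrame-side UDF
spark.udf.register("plusOne", (x: Int) => x + 1)  // SQL-side UDF

val df = Seq(1, 2, 3).toDF("n")
df.select(plusOne($"n")).show()                   // 2, 3, 4
df.createOrReplaceTempView("t")
spark.sql("SELECT plusOne(n) FROM t").show()      // 2, 3, 4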
package l3

import l3.input.{ FileReader, SeqReader }
import scala.util.parsing.input.{ StreamReader }

object Main {

  def passThrough[T](f: T => Unit): T => T = { t: T => f(t); t }

  def treePrinter[T <: Formatable](msg: String): T => T = passThrough { tree =>
    val writer = new java.io.PrintWriter(System.out)
    println(msg)
    tree.toDocument.format(78, writer)
    writer.println()
    writer.flush()
  }

  def seqPrinter[T](msg: String): Seq[T] => Seq[T] = passThrough { program =>
    println(msg)
    for (elem <- program) println(elem)
  }

  def main(args: Array[String]): Unit = {
    val inFiles = args.toList
    val inReader = SeqReader(inFiles map { FileReader(_) })

    L3Parser.program(new L3Scanner.Scanner(inReader)) match {
      case L3Parser.Success(program, _) =>
        val backEnd = (
          CL3NameAnalyzer
          andThen treePrinter("========== After name analysis")
          //andThen CL3Interpreter
          andThen CL3ToCPSTranslator
          //andThen treePrinter("========== After CL3 to CPS translation")
          andThen SymbolicCPSInterpreter
          // andThen CPSDataRepresenter
          // andThen CPSHoister
          // andThen CPSRegisterAllocator
          // andThen CPSToASMTranslator
          // andThen ASMLabelResolver
          // andThen ASMFileWriter("out.asm")
        )
        backEnd(program)
      case failure @ L3Parser.NoSuccess(_, _) =>
        Console.println(failure)
        exit(1)
    }
  }
}
sana/WorkAtEPFL
l3-compiler/compiler-backend/compiler/src/l3/Main.scala
Scala
gpl-3.0
1,514
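The back end in Main is plain function composition: every pass is a Function1 chained with andThen. A minimal sketch with stand-in passes (not the real l3 phases):

val parse: String => List[String] = _.split("\\s+").toList      // stand-in pass
val lower: List[String] => String = _.mkString("(", " ", ")")   // stand-in pass

val pipeline: String => String = parse andThen lower
// pipeline("let x 1") == "(let x 1)"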
/*
 * Happy Melly Teller
 * Copyright (C) 2013 - 2016, Happy Melly http://www.happymelly.com
 *
 * This file is part of the Happy Melly Teller.
 *
 * Happy Melly Teller is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Happy Melly Teller is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Happy Melly Teller. If not, see <http://www.gnu.org/licenses/>.
 *
 * If you have questions concerning this license or the applicable additional terms, you may contact
 * by email Sergey Kotlov, [email protected] or
 * in writing Happy Melly One, Handelsplein 37, Rotterdam, The Netherlands, 3071 PR
 */
package modules

import com.google.inject.AbstractModule
import models.actors._
import play.api.libs.concurrent.AkkaGuiceSupport
import services.integrations.EmailActor

/**
 * Initialises all actors
 */
class Actors extends AbstractModule with AkkaGuiceSupport {

  override def configure(): Unit = {
    bindActor[EmailActor]("email")
    bindActor[ProfileStrengthRecalculator]("profile-strength")
    bindActor[SlackServant]("slack-servant")
  }
}
HappyMelly/teller
app/modules/Actors.scala
Scala
gpl-3.0
1,515
package probability.bayes

import au.id.cxd.math.probability.bayes.DiscreteBayes
import au.id.cxd.math.probability.discrete.TableDistribution
import breeze.linalg.{DenseMatrix, DenseVector}
import org.scalatest._

trait TestData {

  /**
   * test the discrete bayes distribution
   **/
  val data = DenseMatrix(
    (0d, 1d / 6d, 0d),
    (1d, 1d / 6d, 4d / 26d),
    (2d, 1d / 6d, 6d / 26d),
    (3d, 1d / 6d, 6d / 26d),
    (4d, 1d / 6d, 4d / 26d),
    (5d, 1d / 6d, 0d)
  )

  /**
   * the joint likelihood table
   * Its columns are
   *
   * [ Y, P(Y|X_0), P(Y|X_1), P(Y|X_2), P(Y|X_3), P(Y|X_4), P(Y|X_5) ]
   */
  val jointYData = DenseMatrix(
    (0d, 20d / 120d, 12d / 120d, 6d / 120d, 2d / 120d, 0d, 0d),
    (1d, 0d, 4d / 120d, 6d / 120d, 6d / 120d, 4d / 120d, 0d),
    (2d, 0d, 4d / 120d, 6d / 120d, 6d / 120d, 4d / 120d, 0d),
    (3d, 0d, 0d, 2d / 120d, 6d / 120d, 12d / 120d, 20d / 120d)
  )

  val prior = TableDistribution(
    table = data
  )

  val likelihoods = List(
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 1),
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 2),
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 3),
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 4),
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 5),
    TableDistribution(table = jointYData, domainColumn = 0, rangeColumn = 6)
  )

  // calculate posterior P(X|Y)
  // for each value of X and Y
  val x = data(::, 0).toArray
  val y = jointYData(::, 1).toArray.distinct

  def computePosterior() = {
    val xRange = x.zip(likelihoods)
    // compute the posterior distribution
    // for P(X|Y)
    val xy = x.map { x1 =>
      y.zip(likelihoods).map { y1 => (x1, y1._1, y1._2) }
    }.foldLeft(Seq[(Double, Double, TableDistribution)]()) {
      (accum, series) => accum ++ series.toSeq
    }
    xy.map { pair => {
      val dist = new DiscreteBayes(
        likelihood = pair._3,
        prior = prior
      )
      ((pair._1, pair._2), dist.posterior(pair._2, pair._1, xRange))
    }
    }
  }

  def tabulate(data: Seq[((Double, Double), Double)]) = {
    val matData = DenseMatrix.zeros[Double](data.length, 3)
    data.foldLeft((0, matData)) {
      (accum, row) => {
        val (x, y) = row._1
        val i = accum._1
        accum._2(i, 0 to 2) := DenseVector(x, y, row._2).t
        (accum._1 + 1, accum._2)
      }
    }
  }
}

/**
 * Created by cd on 27/03/2016.
 */
class TestDiscreteBayes extends FlatSpec with Matchers {

  "Discrete Bayes" should "find posterior" in new TestData {
    val posterior = computePosterior
    val table = tabulate(posterior)
    println(table)
  }
}
cxd/scala-au.id.cxd.math
math/src/test/scala/probability/bayes/TestDiscreteBayes.scala
Scala
mit
2,901
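The test above drives the library's DiscreteBayes; the rule it implements can be sketched directly in plain Scala (a hypothetical map-based representation, not the TableDistribution API):

// P(X=x | Y=y) = P(Y=y | X=x) * P(X=x) / sum over x' of P(Y=y | X=x') * P(X=x')
def posterior(likelihood: Map[(Double, Double), Double],  // (y, x) -> P(Y=y | X=x)
              prior: Map[Double, Double])                 // x -> P(X=x)
             (y: Double, x: Double): Double = {
  val evidence = prior.keys.map(x1 => likelihood((y, x1)) * prior(x1)).sum
  likelihood((y, x)) * prior(x) / evidence
}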
/* __ *\\ ** ________ ___ / / ___ __ ____ Scala.js Test Suite ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ ** ** /____/\\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \\* */ package org.scalajs.testsuite.typedarray import org.junit.Assert._ import org.junit.Test import org.scalajs.testsuite.utils.Requires import scala.scalajs.js import js.typedarray._ object TypedArrayConversionTest extends Requires.TypedArray class TypedArrayConversionTest { def data(factor: Double): js.Array[Double] = js.Array(-1, 1, 2, 3, 4, 5, 6, 7, 8).map((_: Int) * factor) def sum(factor: Double): Double = (8 * 9 / 2 - 1) * factor @Test def convert_an_Int8Array_to_a_scala_Array_Byte(): Unit = { val x = new Int8Array(data(1)) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Byte]]) assertEquals(sum(1), y.sum) // Ensure its a copy x(0) = 0 assertEquals(sum(1), y.sum) } @Test def convert_an_Int16Array_to_a_scala_Array_Short(): Unit = { val x = new Int16Array(data(100)) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Short]]) assertEquals(sum(100), y.sum) // Ensure its a copy x(0) = 0 assertEquals(sum(100), y.sum) } @Test def convert_an_Uint16Array_to_a_scala_Array_Char(): Unit = { val data = js.Array((1 to 6).map(_ * 10000): _*) val sum = (6*7/2*10000).toChar val x = new Uint16Array(data) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Char]]) assertEquals(sum, y.sum) // Ensure its a copy x(0) = 0 assertEquals(sum, y.sum) } @Test def convert_an_Int32Array_to_a_scala_Array_Int(): Unit = { val x = new Int32Array(data(10000)) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Int]]) assertEquals(sum(10000), y.sum) // Ensure its a copy x(0) = 0 assertEquals(sum(10000), y.sum) } @Test def convert_a_Float32Array_to_a_scala_Array_Float(): Unit = { val x = new Float32Array(data(0.2)) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Float]]) assertEquals(sum(0.2), y.sum, 1E-6) // Ensure its a copy x(0) = 0 assertEquals(sum(0.2), y.sum, 1E-6) } @Test def convert_a_Float64Array_to_a_scala_Array_Double(): Unit = { val x = new Float64Array(data(0.2)) val y = x.toArray assertTrue(y.getClass == classOf[scala.Array[Double]]) assertEquals(sum(0.2), y.sum) // Ensure its a copy x(0) = 0 assertEquals(sum(0.2), y.sum) } @Test def convert_a_scala_Array_Byte__to_an_Int8Array(): Unit = { val x = (Byte.MinValue to Byte.MaxValue).map(_.toByte).toArray val y = x.toTypedArray assertTrue(y.isInstanceOf[Int8Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i), y(i)) // Ensure its a copy x(0) = 0 assertEquals(Byte.MinValue, y(0)) } @Test def convert_a_scala_Array_Short__to_an_Int16Array(): Unit = { val x = ((Short.MinValue to (Short.MinValue + 1000)) ++ ((Short.MaxValue - 1000) to Short.MaxValue)).map(_.toShort).toArray val y = x.toTypedArray assertTrue(y.isInstanceOf[Int16Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i), y(i)) // Ensure its a copy x(0) = 0 assertEquals(Short.MinValue, y(0)) } @Test def convert_a_scala_Array_Char__to_an_Uint16Array(): Unit = { val x = ((Char.MaxValue - 1000) to Char.MaxValue).map(_.toChar).toArray val y = x.toTypedArray assertTrue(y.isInstanceOf[Uint16Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i).toInt, y(i)) // Ensure its a copy x(0) = 0 assertEquals(Char.MaxValue - 1000, y(0)) } @Test def convert_a_scala_Array_Int__to_an_Int32Array(): Unit = { val 
x = ((Int.MinValue to (Int.MinValue + 1000)) ++ ((Int.MaxValue - 1000) to Int.MaxValue)).toArray val y = x.toTypedArray assertTrue(y.isInstanceOf[Int32Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i), y(i)) // Ensure its a copy x(0) = 0 assertEquals(Int.MinValue, y(0)) } @Test def convert_a_scala_Array_Float__to_a_Float32Array(): Unit = { val x = Array[Float](1.0f, 2.0f, -2.3f, 5.3f) val y = x.toTypedArray assertTrue(y.isInstanceOf[Float32Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i), y(i)) // Ensure its a copy x(0) = 0 assertEquals(1.0f, y(0)) } @Test def convert_a_scala_Array_Double__to_a_Float64Array(): Unit = { val x = Array[Double](1.0, 2.0, -2.3, 5.3) val y = x.toTypedArray assertTrue(y.isInstanceOf[Float64Array]) assertEquals(x.length, y.length) for (i <- 0 until y.length) assertEquals(x(i), y(i)) // Ensure its a copy x(0) = 0 assertEquals(1.0, y(0)) } }
japgolly/scala-js
test-suite/js/src/test/scala/org/scalajs/testsuite/typedarray/TypedArrayConversionTest.scala
Scala
bsd-3-clause
5,302
package zeroadv.position

import zeroadv.{DimM, PosM}
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.{MultiDirectionalSimplex, SimplexOptimizer}
import org.apache.commons.math3.optim.nonlinear.scalar.{GoalType, ObjectiveFunction}
import org.apache.commons.math3.analysis.MultivariateFunction
import org.apache.commons.math3.optim.{MaxEval, InitialGuess}
import com.typesafe.scalalogging.slf4j.Logging

class CalculatePosition extends Logging {
  def calculate(data: List[(PosM, DimM)]): (PosM, DimM) = {
    val xs = data.map(_._1.x.coord)
    val ys = data.map(_._1.y.coord)

    val maxCoord = List(xs.min, xs.max, ys.min, ys.max).map(math.abs).max
    def randomCoord = math.random*maxCoord*2-maxCoord

    val result = new SimplexOptimizer(1e-3, 1e-6).optimize(
      new ObjectiveFunction(scoreFunction(data)),
      GoalType.MINIMIZE,
      new MultiDirectionalSimplex(2),
      new InitialGuess(Array(randomCoord, randomCoord)),
      new MaxEval(100000)
    )

    val resultPosM = PosM(DimM(result.getPoint.apply(0)), DimM(result.getPoint.apply(1)))
    val resultDistance = DimM(result.getValue)

    logger.debug(s"Position calculation result from $data: $resultPosM - $resultDistance")

    (resultPosM, resultDistance)
  }

  private def scoreFunction(data: List[(PosM, DimM)]) = new MultivariateFunction() {
    def value(point: Array[Double]) = {
      val p = PosM(DimM(point(0)), DimM(point(1)))

      data
        .map { case (c, r) => math.pow((PosM.dist(c, p) - r).coord, 2) }
        .sum
    }
  }
}

/*
  val opt = new CMAESOptimizer(Integer.MAX_VALUE, Double.NegativeInfinity, true, 1, 1, new JDKRandomGenerator(), false,
    new SimpleValueChecker(1E-3, 1E-6))

  val r2 = opt.optimize(new ObjectiveFunction(scoreFunction(data)),
    GoalType.MINIMIZE,
    new CMAESOptimizer.Sigma(Array.fill(2)(5.0d)),
    new SimpleBounds(Array(-100.0d, -100.0d), Array(100.0d, 100.0d)),
    new InitialGuess(Array(0.0d, 0.0d)),
    new MaxEval(10000),
    new CMAESOptimizer.PopulationSize(25))
 */
adamw/zeroadv
collector/src/main/scala/zeroadv/position/CalculatePosition.scala
Scala
gpl-2.0
2,038
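A hypothetical call to CalculatePosition, using the PosM/DimM constructors as they appear in the class: three beacons at known positions, each paired with a measured distance.

val beacons = List(
  (PosM(DimM(0.0), DimM(0.0)), DimM(5.0)),
  (PosM(DimM(10.0), DimM(0.0)), DimM(5.0)),
  (PosM(DimM(5.0), DimM(8.0)), DimM(3.0))
)
val (estimatedPos, residual) = new CalculatePosition().calculate(beacons)
// estimatedPos minimises the summed squared error between measured and implied distances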
package org.openguard.core.ftp

import javax.sql.DataSource

import org.apache.ftpserver.ftplet.{FtpException, User}
import org.apache.ftpserver.usermanager.PasswordEncryptor
import org.apache.ftpserver.usermanager.impl.{BaseUser, DbUserManager}
import play.api.Play
import play.api.Play.current

class UserManager(dataSource: DataSource,
                  selectAllStmt: String,
                  selectUserStmt: String,
                  insertUserStmt: String,
                  updateUserStmt: String,
                  deleteUserStmt: String,
                  authenticateStmt: String,
                  isAdminStmt: String,
                  passwordEncryptor: PasswordEncryptor,
                  adminName: String)
  extends DbUserManager(dataSource, selectAllStmt, selectUserStmt, insertUserStmt, updateUserStmt,
    deleteUserStmt, authenticateStmt, isAdminStmt, passwordEncryptor, adminName) {

  @throws(classOf[FtpException])
  override def getUserByName(name: String): User = {
    val user = super.getUserByName(name)
    val baseuser = new BaseUser;
    baseuser.setAuthorities(user.getAuthorities())
    baseuser.setEnabled(user.getEnabled)
    baseuser.setHomeDirectory(user.getHomeDirectory.replace("${ftphome}",
      Play.configuration.getString("ovg.ftpDirectory").getOrElse("~/")))
    baseuser.setMaxIdleTime(user.getMaxIdleTime)
    baseuser.setName(user.getName)
    baseuser.setPassword(user.getPassword)
    return baseuser
  }
}
pbolle/openvideoguard
core/src/main/scala/org/openguard/core/ftp/UserManager.scala
Scala
apache-2.0
1,358
import eu.inn.binders.naming.CamelCaseToSnakeCaseConverter
import org.scalatest.{FlatSpec, Matchers}

class TestConvertJsonSerializer extends FlatSpec with Matchers {

  import eu.inn.binders.json._

  "Json " should " serialize class with Int with Converter" in {
    implicit val factory = new DefaultSerializerFactory[CamelCaseToSnakeCaseConverter]
    val t = TestInt(1234)
    val str = t.toJson
    assert(str === """{"int_val":1234}""")
  }
}
InnovaCo/binders-json
src/test/scala/TestConvertJsonSerializer.scala
Scala
bsd-3-clause
460
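The assertion above depends on camelCase field names being rewritten to snake_case. A standalone sketch of that conversion (plain Scala, not the binders CamelCaseToSnakeCaseConverter itself):

def camelToSnake(name: String): String =
  name.replaceAll("([a-z0-9])([A-Z])", "$1_$2").toLowerCase

// camelToSnake("intVal") == "int_val"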
/***********************************************************************
 * Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0 which
 * accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 *************************************************************************/

package org.locationtech.geomesa.convert.fixedwidth

import com.typesafe.config.ConfigFactory
import com.vividsolutions.jts.geom.{Coordinate, Point}
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

@RunWith(classOf[JUnitRunner])
class FixedWidthConverterTest extends Specification {

  "FixedWidthConverter" >> {

    val conf = ConfigFactory.parseString(
      """
        | converter = {
        |   type     = "fixed-width"
        |   id-field = "uuid()"
        |   fields = [
        |     { name = "lat",  transform = "$0::double", start = 1, width = 2 },
        |     { name = "lon",  transform = "$0::double", start = 3, width = 2 },
        |     { name = "geom", transform = "point($lon, $lat)" }
        |   ]
        | }
      """.stripMargin)

    val sft = SimpleFeatureTypes.createType(ConfigFactory.load("sft_testsft.conf"))
    val converter = SimpleFeatureConverters.build[String](sft, conf)

    "process fixed with data" >> {
      val data =
        """
          |14555
          |16565
        """.stripMargin

      converter must not beNull

      val res = converter.processInput(data.split("\n").toIterator.filterNot(s => "^\\s*$".r.findFirstIn(s).size > 0)).toList
      res.size must be equalTo 2
      res(0).getDefaultGeometry.asInstanceOf[Point].getCoordinate must be equalTo new Coordinate(55.0, 45.0)
      res(1).getDefaultGeometry.asInstanceOf[Point].getCoordinate must be equalTo new Coordinate(65.0, 65.0)
    }
  }
}
drackaer/geomesa
geomesa-convert/geomesa-convert-fixedwidth/src/test/scala/org/locationtech/geomesa/convert/fixedwidth/FixedWidthConverterTest.scala
Scala
apache-2.0
2,111
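The converter config above reads each field as a (start, width) slice of the record, which the test data suggests is a 0-based offset. A standalone sketch of that extraction for the first test record:

def field(record: String, start: Int, width: Int): Double =
  record.substring(start, start + width).toDouble

val lat = field("14555", 1, 2) // 45.0
val lon = field("14555", 3, 2) // 55.0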
package unfiltered.response

trait BaseContentType extends Responder[Any] {
  def respond(res: HttpResponse[Any]) {
    res.setContentType(contentType)
  }
  def contentType: String
}

case class CharContentType(content_type: String) extends BaseContentType {
  def contentType = "%s; charset=%s".format(content_type, charset)
  def charset = "utf-8"
}

object CssContent extends CharContentType("text/css")
object HtmlContent extends CharContentType("text/html")
object JsContent extends CharContentType("text/javascript")
object CsvContent extends CharContentType("text/csv")
object TextXmlContent extends CharContentType("text/xml")
object PlainTextContent extends CharContentType("text/plain")
object JsonContent extends CharContentType("application/json")
object ApplicationXmlContent extends CharContentType("application/xml")
object FormEncodedContent extends ContentType("application/x-www-form-urlencoded")

case class ContentType(override val contentType: String) extends BaseContentType

object PdfContent extends ContentType("application/pdf")
softprops/Unfiltered
library/src/main/scala/response/types.scala
Scala
mit
1,055
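A hypothetical use of these responders with unfiltered's responder chaining, assuming unfiltered-core's Ok and ResponseString are available (a sketch, not taken from the library's docs):

import unfiltered.response.{JsonContent, Ok, ResponseString}

def jsonOk(body: String) = Ok ~> JsonContent ~> ResponseString(body)
// jsonOk("""{"status":"ok"}""") responds with status 200, content type
// "application/json; charset=utf-8" and the given body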
package uk.co.pragmasoft.graphdb.orient import com.tinkerpop.blueprints.{Query, TransactionalGraph, Vertex} import com.tinkerpop.blueprints.impls.orient._ import uk.co.pragmasoft.graphdb.GraphDAO import uk.co.pragmasoft.graphdb.marshalling.{GraphMarshallingDSL, GraphMarshaller} import uk.co.pragmasoft.graphdb.validation.GraphDAOValidations import uk.co.pragmasoft.graphdb.marshalling.GraphMarshallingDSL import scala.collection.JavaConversions._ trait OrientDbDAO[T] extends GraphDAO[T] with OrientDBBasicConversions with GraphMarshallingDSL { self: GraphDAOValidations[T] => override def marshaller: OrientGraphMarshaller[T] def graphFactory: OrientGraphFactory override protected def createTransactionalGraph: TransactionalGraph = graphFactory.getTx() // FOR ORIENTDB THIS METHOD CAN BE USED ALSO NESTED // See https://github.com/orientechnologies/orientdb/wiki/Transaction-propagation about transaction propagation // Using pools: http://www.orientechnologies.com/new-orientdb-graph-factory/ // We should be fine in using nested trasaction opening and committing local operations // https://github.com/orientechnologies/orientdb/wiki/Transactions override protected def withGraphDb[T](block : TransactionalGraph=> T): T = { val graphDb = createTransactionalGraph try { val result = block(graphDb) graphDb.commit() result } catch { case e: Exception => graphDb.rollback() throw e } } override def create(newInstance: T): T = { validateNew(newInstance) withGraphDb { implicit graphDB => val newVertex = createNewVertex(_marshaller.getModelObjectID(newInstance)) newInstance writeTo newVertex // Need to close the first transaction to have the ID created valid graphDB.commit() newVertex.as[T] } } override def update(existingInstance: T): T = { validateUpdate(existingInstance) val updatedVertex = withGraphDb { implicit graphDB => val id = _marshaller.getModelObjectID(existingInstance) val vertexOp = getAsVertexById(id) require(vertexOp.isDefined, s"unable to update entity with Id ${existingInstance.getVertexId}. Not in the DB") val vertex = vertexOp.get existingInstance updateTo vertex vertex } readWithGraphDb { implicit graphDB => updatedVertex.as[T] } } /** * Query entities of the vertex class using OrientDB custom features * See https://groups.google.com/forum/#!topic/orient-database/9lnoOeN7Y3U * * @param graph * * @return a query object with an already set filter on entites of the handled vertex class type */ protected def queryForEntityClass(implicit graph: TransactionalGraph): Query = { graph.query().asInstanceOf[OrientGraphQuery].labels(marshaller.vertexClassName) } /** * Collects a vertex OF THE HANDLED VERTEX CLASS given the specified ID * * @param id * @param graphDB * * @return Some(vertex) if the vertex is found and is of the right class type, None otherwise */ override def getAsVertexById(id: Any)(implicit graphDB: TransactionalGraph): Option[Vertex] = { val theVertex: OrientVertex = graphDB.getVertex(id).asInstanceOf[OrientVertex] if (theVertex != null && theVertex.getVertexInstance.getVertexInstance.getLabel == marshaller.vertexClassName) { Some(theVertex) } else { None } } /** * Find vertices of the given class by the given property name. The property is expected to have been indexed * According to OrientDB class index strategy. 
*/ protected def findByIndexedProperty(className: String, propertyName: String, value: Any)(implicit graph: TransactionalGraph): Iterator[Vertex] = { findWithIndex(s"$className.$propertyName", value) } protected def findByIndexedProperty(propertyName: String, value: Any)(implicit graph: TransactionalGraph): Iterator[T] = findByIndexedProperty(marshaller.vertexClassName, propertyName, value).map(_.as[T]) /** * As findByIndexedProperty for many indexed properties */ protected def findByIndexedProperties(className: String, propertyNames: Iterable[String], values: Iterable[Any])(implicit graph: OrientGraph): Iterator[Vertex] = { graph.getVertices( className, Array(propertyNames.toList:_*), Array( (values.map {_.asInstanceOf[java.lang.Object]} .toList):_*) ).iterator() } protected def findByIndexedProperties(propertyNames: Iterable[String], values: Iterable[Any])(implicit graph: OrientGraph): Iterator[T] = { findByIndexedProperties(marshaller.vertexClassName, propertyNames, values).map(_.as[T]) } /** * Finds vertices using orient's index with the given name */ protected def findWithIndex(indexFullName: String, value: Any)(implicit graph: TransactionalGraph): Iterator[Vertex] = { graph.getVertices(indexFullName, value).iterator() } /** * Finds vertices using orient's composite index with the given name */ protected def findByCompositeIndex(indexFullName: String, values: Any*)(implicit graph: TransactionalGraph): Iterator[Vertex] = { graph.getVertices(indexFullName, seqAsJavaList(values.toSeq) ).iterator() } /** * The ID in Orient can be used to specify the vertex class type * * @param id * @param graphDb * @return */ override protected def createNewVertex(id: Any)(implicit graphDb: TransactionalGraph): Vertex = { val orientGraph = graphDb.asInstanceOf[OrientGraph] orientGraph.addVertex(marshaller.vertexClassSpec, Array.empty[String]: _*) } }
galarragas/sgal
sgal-orient/src/main/scala/uk/co/pragmasoft/graphdb/orient/OrientDbDAO.scala
Scala
apache-2.0
5,627
package org.scalatra package auth import servlet.ServletBase import collection.mutable.{ HashMap, Map => MMap } import scala.PartialFunction import ScentryAuthStore.{SessionAuthStore, ScentryAuthStore} import util.RicherString import collection.immutable.List._ object Scentry { type StrategyFactory[UserType <: AnyRef] = ServletBase => ScentryStrategy[UserType] private val _globalStrategies = new HashMap[Symbol, StrategyFactory[_ <: AnyRef]]() def registerStrategy[UserType <: AnyRef](name: Symbol, strategyFactory: StrategyFactory[UserType]) = _globalStrategies += (name -> strategyFactory) def globalStrategies = _globalStrategies def clearGlobalStrategies() { _globalStrategies.clear() } val scentryAuthKey = "scentry.auth.default.user".intern() val ScentryRequestKey = "org.scalatra.auth.Scentry".intern } class Scentry[UserType <: AnyRef]( app: ServletBase, serialize: PartialFunction[UserType, String], deserialize: PartialFunction[String, UserType] ) { import RicherString._ type StrategyType = ScentryStrategy[UserType] type StrategyFactory = ServletBase => StrategyType import Scentry._ private val _strategies = new HashMap[Symbol, StrategyFactory]() private var _user: UserType = null.asInstanceOf[UserType] private var _store: ScentryAuthStore = new SessionAuthStore(app.session) @deprecated("use store_= instead", "2.0.0") def setStore(newStore: ScentryAuthStore) { store = newStore } def store = _store def store_=(newStore: ScentryAuthStore) { _store = newStore } def isAuthenticated = { userOption.isDefined } @deprecated("use isAuthenticated", "2.0.0") def authenticated_? = isAuthenticated //def session = app.session def params = app.params def redirect(uri: String) { app.redirect(uri) } def registerStrategy(name: Symbol, strategyFactory: StrategyFactory) = _strategies += (name -> strategyFactory) def strategies: MMap[Symbol, ScentryStrategy[UserType]] = (globalStrategies ++ _strategies) map { case (nm, fact) => (nm -> fact.asInstanceOf[StrategyFactory](app)) } def userOption: Option[UserType] = Option(user) def user : UserType = if (_user != null) _user else { val key = store.get if (key.nonBlank) { runCallbacks() { _.beforeFetch(key) } val res = fromSession(key) if (res != null) runCallbacks() { _.afterFetch(res) } _user = res res } else null.asInstanceOf[UserType] } def user_=(v: UserType) = { _user = v if (v != null) { runCallbacks() { _.beforeSetUser(v) } val res = toSession(v) store.set(res) runCallbacks() { _.afterSetUser(v) } res } else v } def fromSession = deserialize orElse missingDeserializer def toSession = serialize orElse missingSerializer private def missingSerializer: PartialFunction[UserType, String] = { case _ => throw new RuntimeException("You need to provide a session serializer for Scentry") } private def missingDeserializer: PartialFunction[String, UserType] = { case _ => throw new RuntimeException("You need to provide a session deserializer for Scentry") } def authenticate(names: Symbol*) = { runAuthentication(names:_*) map { case (stratName, usr) => runCallbacks() { _.afterAuthenticate(stratName, usr) } user = usr user } orElse { runUnauthenticated } } private def runAuthentication(names: Symbol*) = { (List[(Symbol, UserType)]() /: strategies) { case (acc, (nm, strat)) => runCallbacks(_.isValid) { _.beforeAuthenticate } val r = if(acc.isEmpty && strat.isValid && (names.isEmpty || names.contains(nm))) { strat.authenticate() match { case Some(usr) => (nm, usr) :: Nil case _ => List.empty[(Symbol, UserType)] } } else List.empty[(Symbol, UserType)] acc ::: r } headOption } 
private def runUnauthenticated = { strategies filter { case (_, strat) => strat.isValid } map { case (_, s) => s } toList match { case Nil => defaultUnauthenticated foreach { _.apply() } case l => { l foreach { s => runCallbacks() { _.unauthenticated() } } defaultUnauthenticated foreach { _.apply() } } } None } private var defaultUnauthenticated: Option[() => Unit] = None def unauthenticated(callback: => Unit) { defaultUnauthenticated = Some(() => callback) } def logout() { val usr = user.asInstanceOf[UserType] runCallbacks() { _.beforeLogout(usr) } if (_user != null) _user = null.asInstanceOf[UserType] store.invalidate runCallbacks() { _.afterLogout(usr) } } private def runCallbacks(guard: StrategyType => Boolean = s => true)(which: StrategyType => Unit) { strategies foreach { case (_, v) if guard(v) => which(v) case _ => // guard failed } } }
louk/scalatra
auth/src/main/scala/org/scalatra/auth/Scentry.scala
Scala
bsd-2-clause
4,884
/***********************************************************************
 * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.fs.storage.common.partitions

import org.junit.runner.RunWith
import org.locationtech.geomesa.fs.storage.api.{NamedOptions, PartitionSchemeFactory}
import org.locationtech.geomesa.fs.storage.common.StorageSerialization
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import org.specs2.specification.AllExpectations

@RunWith(classOf[JUnitRunner])
class PartitionSchemeConfTest extends Specification with AllExpectations {

  sequential

  "PartitionScheme" should {
    "load from conf" >> {
      val conf =
        """
          | {
          |   scheme = "datetime,z2"
          |   options = {
          |     datetime-format = "yyyy/DDD/HH"
          |     step-unit = HOURS
          |     step = 1
          |     dtg-attribute = dtg
          |     geom-attribute = geom
          |     z2-resolution = 10
          |     leaf-storage = true
          |   }
          | }
        """.stripMargin

      val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
      val scheme = PartitionSchemeFactory.load(sft, StorageSerialization.deserialize(conf))
      scheme must beAnInstanceOf[CompositeScheme]
    }

    "load, serialize, deserialize" >> {
      val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
      val options = NamedOptions("daily,z2-2bit")
      val scheme = PartitionSchemeFactory.load(sft, options)
      scheme must beAnInstanceOf[CompositeScheme]
      val schemeStr = StorageSerialization.serialize(options)
      val scheme2 = PartitionSchemeFactory.load(sft, StorageSerialization.deserialize(schemeStr))
      scheme2 mustEqual scheme
    }

    "load dtg, geom, and step defaults" >> {
      val conf =
        """
          | {
          |   scheme = "datetime,z2"
          |   options = {
          |     datetime-format = "yyyy/DDD/HH"
          |     step-unit = HOURS
          |     z2-resolution = 10
          |   }
          | }
        """.stripMargin

      val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,foo:Date,*bar:Point:srid=4326")
      val scheme = PartitionSchemeFactory.load(sft, StorageSerialization.deserialize(conf))
      scheme must beAnInstanceOf[CompositeScheme]
      scheme.asInstanceOf[CompositeScheme].schemes must haveLength(2)
      scheme.asInstanceOf[CompositeScheme].schemes must
          contain(beAnInstanceOf[DateTimeScheme], beAnInstanceOf[Z2Scheme]).copy(checkOrder = true)
    }
  }
}
aheyne/geomesa
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-common/src/test/scala/org/locationtech/geomesa/fs/storage/common/partitions/PartitionSchemeConfTest.scala
Scala
apache-2.0
3,073
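A standalone sketch of what the "yyyy/DDD/HH" datetime pattern used above produces for a timestamp (java.time only, not the GeoMesa partition scheme itself):

import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter

val fmt = DateTimeFormatter.ofPattern("yyyy/DDD/HH")
val partition = ZonedDateTime.parse("2020-03-01T14:30:00Z").format(fmt) // "2020/061/14"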
/**
 * Copyright (C) 2010 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.xforms.function

import java.util.{Calendar, GregorianCalendar}

import org.orbeon.oxf.xml.RuntimeDependentFunction
import org.orbeon.saxon.expr.XPathContext
import org.orbeon.saxon.value.{DateValue, StringValue}

/**
 * XForms local-date() function (XForms 1.1).
 */
class LocalDate extends XFormsFunction with RuntimeDependentFunction {

  override def evaluateItem(context: XPathContext): StringValue = {
    val value = stringArgumentOpt(0)(context) match {
      case Some("test") ⇒
        new DateValue("2004-12-31-07:00")
      case _ ⇒
        val now = new GregorianCalendar
        new DateValue(now, now.get(Calendar.ZONE_OFFSET) / 1000 / 60)
    }
    new StringValue(value.getStringValue)
  }
}
brunobuzzi/orbeon-forms
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/function/LocalDate.scala
Scala
lgpl-2.1
1,413
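A standalone sketch of the offset arithmetic above: Calendar.ZONE_OFFSET is reported in milliseconds, which the code converts to minutes before handing it to DateValue.

import java.util.{Calendar, GregorianCalendar}

val now = new GregorianCalendar
val offsetMinutes = now.get(Calendar.ZONE_OFFSET) / 1000 / 60 // ms -> seconds -> minutes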
// scalac: -opt:l:inline -opt-inline-from:** -Yopt-inline-heuristics:everything -opt-warnings:_ -Werror

class Test {
  def foo = A_1.test
}
lrytz/scala
test/files/neg/inlineIndyLambdaPrivate/Test_2.scala
Scala
apache-2.0
140
/* (c) copyright This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.twitter.processing import com.google.gson._ /** * Methods to avoid the tedium of getting a value, checking to see if * it's null, and then performing some function on it. Used like * <code> * ss(jsonObj, "foo") { * myObj.foo = _ * } * </code> } */ trait JsonSugar { /** * Apply a function to a String value held in a JSON Object (if it's non-null) */ def ss(json: JsonObject, key: String)(f: (String) => Unit) = { if (!json.get(key).isJsonNull) f(json.get(key).getAsString()) } /** * Apply a function to a Boolean value held in a JSON Object (if it's non-null) */ def sb(json: JsonObject, key: String)(f: (Boolean) => Unit) = { if (!json.get(key).isJsonNull) f(json.get(key).getAsBoolean()) } /** * Apply a function to a Long value held in a JSON Object (if it's non-null) */ def sl(json: JsonObject, key: String)(f: (Long) => Unit) = { if (!json.get(key).isJsonNull) f(json.get(key).getAsLong()) } /** * Apply a function to an Int value held in a JSON Object (if it's non-null) */ def si(json: JsonObject, key: String)(f: (Int) => Unit) = { if (!json.get(key).isJsonNull) f(json.get(key).getAsInt()) } } /** * Represents a Tweet */ class Status { var favorited = false; var text = ""; var inReplyToUserId = -1L; var inReplyToStatusId = -1L; var inReplyToScreenName = ""; var geo:Geo = null; var source = ""; var createdAt = ""; var user:User = null; var truncated = false; var id = -1L; } /** * Provides methods for unmarshalling a status from Json */ object Status extends JsonSugar{ /** * One Json parser to use. Note: this makes it not thread safe. 
*/ val parser = new JsonParser(); /** * Get Some(Status) from a Json String, * or None if we can't unmarshal from the given String */ def fromJson(json: String): Option[Status] = { fromJson(parser.parse(json).getAsJsonObject()) } /** * Get Some(Status) from a Json String, * or None if the given json object isn't a status */ def fromJson(rootElem: JsonObject): Option[Status] = { if(rootElem.has("delete") || rootElem.has("limit")) { None } else if(rootElem.has("text") && rootElem.has("id")){ val status = new Status() status.favorited = rootElem.get("favorited").getAsBoolean() status.text = rootElem.get("text").getAsString() sl(rootElem, "in_reply_to_user_id") {status.inReplyToUserId = _} sl(rootElem, "in_reply_to_status_id") {status.inReplyToStatusId = _} ss(rootElem, "in_reply_to_screen_name") {status.inReplyToScreenName = _} if (!rootElem.get("geo").isJsonNull) status.geo = Geo.fromJson(rootElem.get("geo").getAsJsonObject()) status.source = rootElem.get("source").getAsString() status.createdAt = rootElem.get("created_at").getAsString() status.user = User.fromJson(rootElem.get("user").getAsJsonObject) status.truncated = rootElem.get("truncated").getAsBoolean() status.id = rootElem.get("id").getAsLong() Some(status) } else { None } } } /** * Represents a geo coordinate */ class Geo() { var latitude: Double = 0.0; var longitude: Double = 0.0; } /** * Provides methods for unmarshalling a Geo from Json */ object Geo { /** * One Json parser to use. Note: this makes it not thread safe. */ val parser = new JsonParser(); /** * Get a geo object from the given string. * Note that we don't return options here, as we assume that basic * verification is done by the Status object */ def fromJson(json: String): Geo = { fromJson(parser.parse(json).getAsJsonObject()) } /** * Get a geo object from the given Json object * Note that we don't return options here, as we assume that basic * verification is done by the Status object */ def fromJson(json: JsonObject): Geo = { val arr = json.get("coordinates").getAsJsonArray() val geo = new Geo() geo.latitude = arr.get(0).getAsDouble() geo.longitude = arr.get(1).getAsDouble() geo } } /** * Represents a Twitter User */ class User { var profileBackgroundTile = true; var profileSidebarBorderColor = ""; var url = ""; var verified = false; var followersCount = -1L; var friendsCount = -1L; var description = ""; var profileBackgroundColor = ""; var geoEnabled = false; var favouritesCount = -1L; var notifications: String = null; var createdAt = ""; var profileTextColor = ""; var timeZone = ""; var isProtected = false; var profileImageURL = ""; var statusesCount = -1L; var profileLinkColor = ""; var location = ""; var name = ""; var following = ""; var profileBackgroundImageURL = ""; var screenName = ""; var id = -1L; var utcOffset = 0; var profileSidebarFillColor = ""; } /** * Provides methods for unmarshalling a User from Json */ object User extends JsonSugar{ /** * One Json parser to use. Note: this makes it not thread safe. */ val parser = new JsonParser(); /** * Get a User object from the given string. * Note that we don't return options here, as we assume that basic * verification is done by the Status object */ def fromJson(json: String): User = { fromJson(parser.parse(json).getAsJsonObject()) } /** * Get a geo object from the given Json object. 
* Note that we don't return options here, as we assume that basic * verification is done by the Status object */ def fromJson(rootElem: JsonObject): User = { val user = new User() user.profileBackgroundTile = rootElem.get("profile_background_tile").getAsBoolean() user.profileSidebarBorderColor = rootElem.get("profile_sidebar_border_color").getAsString() ss(rootElem, "url") {user.url = _} user.verified = rootElem.get("verified").getAsBoolean() user.followersCount = rootElem.get("followers_count").getAsLong() user.friendsCount = rootElem.get("friends_count").getAsLong() ss(rootElem, "description") { user.description = _ } user.profileBackgroundColor = rootElem.get("profile_background_color").getAsString() user.geoEnabled = rootElem.get("geo_enabled").getAsBoolean() user.favouritesCount = rootElem.get("favourites_count").getAsLong() ss(rootElem, "notifications") {user.notifications = _ } user.createdAt = rootElem.get("created_at").getAsString() user.profileTextColor = rootElem.get("profile_text_color").getAsString() ss(rootElem, "time_zone") {user.timeZone = _} user.isProtected = rootElem.get("protected").getAsBoolean() user.profileImageURL = rootElem.get("profile_image_url").getAsString() user.statusesCount = rootElem.get("statuses_count").getAsLong() user.profileLinkColor = rootElem.get("profile_link_color").getAsString() ss(rootElem, "location") {user.location = _} user.name = rootElem.get("name").getAsString() ss(rootElem, "following") {user.following = _} user.profileBackgroundImageURL = rootElem.get("profile_background_image_url").getAsString() user.screenName = rootElem.get("screen_name").getAsString() user.id = rootElem.get("id").getAsLong() si(rootElem, "utc_offset") {user.utcOffset = _} user.profileSidebarFillColor = rootElem.get("profile_sidebar_fill_color").getAsString() user } }
mccv/processing-tweet-stream
src/main/scala/com/twitter/processing/Tweet.scala
Scala
lgpl-3.0
8,010
package robco

import org.apache.spark.{SparkContext, SparkConf}

object DataClasses {
}

object SparkSql {
  //  case class Person(firstName: String, lastName: String, gender: String)
  case class Node(nodeId: String)
  case class Port(egressRate: String, nodeId: String, pirHigh: String, pirLow: String, portId: String)
  case class Queue(nodeId: String, portId: String, queueId: String)
  case class Reading(nodeId: String, portId: String, queueId: String, readingId: String, timestamp: String, value: String)

  def main(args: Array[String]) = {
    //    val sparkMaster = args(0)
    val sparkMaster = "spark://" + args(0) + ":7077"
    println("spark master: " + sparkMaster)
    val hdfsRoot = "hdfs://" + args(1) + ":9000"

    val startTime = System.currentTimeMillis()/1000

    val conf = new SparkConf().setMaster(sparkMaster)
      .set("spark.driver.memory", "6G")
      .set("spark.storage.memoryFraction", "0")
      .set("spark.executor.memory", "6g")

    val dataset = args(2)

    val sc = new SparkContext(conf)
    sys.ShutdownHookThread { sc.stop() }
    val hc = new org.apache.spark.sql.hive.HiveContext(sc)
    import hc.implicits._

    val nodesRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-node.csv").map(_.split(",")).map(n => Node(n(0)))
    val nodes = nodesRDD.toDF
    nodes.registerTempTable("nodes")

    val portsRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-port.csv").map(_.split(",")).map(p => Port(p(0),p(1),p(2),p(3),p(4)))
    val ports = portsRDD.toDF
    ports.registerTempTable("ports")

    val queuesRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-queue.csv").map(_.split(",")).map(q => Queue(q(0),q(1),q(2)))
    val queues = queuesRDD.toDF
    queues.registerTempTable("queues")

    val readingsRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-reading.csv").map(_.split(",")).map(r => Reading(r(0),r(1),r(2),r(3),r(4),r(5)))
    val readings = readingsRDD.toDF
    readings.registerTempTable("readings")

    val dataLoadTime = (System.currentTimeMillis()/1000) - startTime
    println("loading data time: " + dataLoadTime)

    val startTime2 = System.currentTimeMillis()/1000

    val rows = hc.sql("""
      select * from nodes n, ports p, queues q, readings r
      where n.nodeId = p.nodeId and p.nodeId = q.nodeId and q.nodeId = r.nodeId
      and p.portId = q.portId and q.portId = r.portId
      and q.queueId = r.queueId
      """)
    val results = rows.collect

    val queryTime = (System.currentTimeMillis()/1000) - startTime2
    println("query time: " + queryTime)
    println("number of results: " + results.length)
    println("total time taken: " + (dataLoadTime + queryTime))
  }
}
amazoncop/spark
src/main/scala/robco/SparkSql.scala
Scala
apache-2.0
2,736
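The same star join can be written with the DataFrame API instead of raw SQL; a sketch assuming the four DataFrames above are in scope and a Spark version where join(df, Seq(columns)) is available:

val joined = nodes
  .join(ports, "nodeId")
  .join(queues, Seq("nodeId", "portId"))
  .join(readings, Seq("nodeId", "portId", "queueId"))
println("number of results: " + joined.count())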
/*
 * Copyright 2009-2010 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.linkedin.norbert
package cluster

import common.{ClusterNotificationManagerComponent, ClusterManagerComponent}
import java.util.concurrent.TimeUnit
import org.specs.Specification
import actors.Actor
import Actor._
import org.specs.util.WaitFor
import org.specs.mock.Mockito

class ClusterClientSpec extends Specification with Mockito with WaitFor {
  val clusterListenerKey = ClusterListenerKey(10101L)
  val currentNodes = Set(Node(1, "localhost:31313", true, Set(0, 1)),
    Node(2, "localhost:31314", true, Set(0, 1)), Node(3, "localhost:31315", true, Set(0, 1)))

  var clusterActor: Actor = _
  var getCurrentNodesCount = 0
  var addListenerCount = 0
  var currentListeners: List[Actor] = Nil
  var removeListenerCount = 0
  var cnmShutdownCount = 0
  var addNodeCount = 0
  var nodeAdded: Node = _
  var removeNodeCount = 0
  var nodeRemovedId = 0
  var markNodeAvailableCount = 0
  var markNodeAvailableId = 0
  var markNodeUnavailableCount = 0
  var markNodeUnavailableId = 0
  var zkmShutdownCount = 0

  val cluster = new ClusterClient with ClusterManagerComponent with ClusterNotificationManagerComponent {
    def serviceName = "test"

    val clusterNotificationManager = actor {
      loop {
        react {
          case ClusterNotificationMessages.Connected(nodes) =>

          case ClusterNotificationMessages.AddListener(a) =>
            if (clusterActor == null) clusterActor = a
            else {
              addListenerCount += 1
              currentListeners = a :: currentListeners
            }
            reply(ClusterNotificationMessages.AddedListener(clusterListenerKey))

          case ClusterNotificationMessages.RemoveListener(key) => removeListenerCount += 1

          case ClusterNotificationMessages.GetCurrentNodes =>
            getCurrentNodesCount += 1
            reply(ClusterNotificationMessages.CurrentNodes(currentNodes))

          case ClusterNotificationMessages.Shutdown => cnmShutdownCount += 1

          case m => println("Got a message " + m)
        }
      }
    }

    val clusterManager = actor {
      loop {
        react {
          case ClusterManagerMessages.AddNode(node) =>
            addNodeCount += 1
            nodeAdded = node
            reply(ClusterManagerMessages.ClusterManagerResponse(None))

          case ClusterManagerMessages.RemoveNode(id) =>
            removeNodeCount += 1
            nodeRemovedId = id
            reply(ClusterManagerMessages.ClusterManagerResponse(None))

          case ClusterManagerMessages.MarkNodeAvailable(id, initialCapability) =>
            markNodeAvailableCount += 1
            markNodeAvailableId = id
            reply(ClusterManagerMessages.ClusterManagerResponse(None))

          case ClusterManagerMessages.MarkNodeUnavailable(id) =>
            markNodeUnavailableCount += 1
            markNodeUnavailableId = id
            reply(ClusterManagerMessages.ClusterManagerResponse(None))

          case ClusterManagerMessages.Shutdown => zkmShutdownCount += 1
        }
      }
    }
  }

  cluster.start

  "ClusterClient" should {
    "when starting start the cluster notification and ZooKeeper manager actors" in {
      var cnmStarted = false
      var zkmStarted = false

      val c = new ClusterClient with ClusterManagerComponent with ClusterNotificationManagerComponent {
        def serviceName = "test"

        val clusterNotificationManager = new Actor {
          def act() = {
            cnmStarted = true
            react {
              case ClusterNotificationMessages.AddListener(_) => reply(ClusterNotificationMessages.AddedListener(null))
            }
          }
        }

        val clusterManager = new Actor {
          def act() = zkmStarted = true
        }
      }

      c.start
      cnmStarted must beTrue
      zkmStarted must beTrue
    }

    "start" in {
      "disconnected" in {
        cluster.isConnected must beFalse
      }

      "not shutdown" in {
        cluster.isShutdown must beFalse
      }
    }

    "throw ClusterNotStartedException if the cluster wasn't started when the method was called" in {
      val c = new ClusterClient with ClusterManagerComponent with ClusterNotificationManagerComponent {
        def serviceName = null
        val clusterNotificationManager = null
        val clusterManager = null
      }

      c.nodes must throwA[ClusterNotStartedException]
      c.nodeWithId(1) must throwA[ClusterNotStartedException]
      c.addNode(1, null, Set(0)) must throwA[ClusterNotStartedException]
      c.removeNode(1) must throwA[ClusterNotStartedException]
      c.markNodeAvailable(1) must throwA[ClusterNotStartedException]
      c.markNodeUnavailable(1) must throwA[ClusterNotStartedException]
      c.addListener(null) must throwA[ClusterNotStartedException]
      c.removeListener(null) must throwA[ClusterNotStartedException]
      c.awaitConnection must throwA[ClusterNotStartedException]
      c.awaitConnection(1, TimeUnit.SECONDS) must throwA[ClusterNotStartedException]
      c.awaitConnectionUninterruptibly must throwA[ClusterNotStartedException]
      c.isConnected must throwA[ClusterNotStartedException]
      c.isShutdown must throwA[ClusterNotStartedException]
    }

    "throw ClusterShutdownException if shut down for nodes, nodeWith*, *Listener, await*" in {
      cluster.shutdown

      cluster.start must throwA[ClusterShutdownException]
      cluster.nodes must throwA[ClusterShutdownException]
      cluster.nodeWithId(1) must throwA[ClusterShutdownException]
      cluster.addListener(null) must throwA[ClusterShutdownException]
      cluster.removeListener(null) must throwA[ClusterShutdownException]
      cluster.awaitConnection must throwA[ClusterShutdownException]
      cluster.awaitConnection(1, TimeUnit.SECONDS) must throwA[ClusterShutdownException]
      cluster.awaitConnectionUninterruptibly must throwA[ClusterShutdownException]
    }

    "throw ClusterDisconnectedException if disconnected for addNode, removeNode, markNodeAvailable" in {
      cluster.nodes must throwA[ClusterDisconnectedException]
      cluster.nodeWithId(1) must throwA[ClusterDisconnectedException]
      cluster.addNode(1, "localhost:31313", Set(0, 1)) must throwA[ClusterDisconnectedException]
      cluster.removeNode(1) must throwA[ClusterDisconnectedException]
      cluster.markNodeAvailable(1) must throwA[ClusterDisconnectedException]
      cluster.markNodeUnavailable(1) must throwA[ClusterDisconnectedException]
    }

    "handle a connected event" in {
      clusterActor ! ClusterEvents.Connected(Set())

      cluster.isConnected must eventually(beTrue)
    }

    "handle a disconnected event" in {
      clusterActor ! ClusterEvents.Connected(Set())
      cluster.isConnected must eventually(beTrue)

      clusterActor ! ClusterEvents.Disconnected
      cluster.isConnected must eventually(beFalse)
    }

    "addNode should add a node to ZooKeeperManager" in {
      clusterActor ! ClusterEvents.Connected(Set())
      waitFor(10.ms)

      cluster.addNode(1, "localhost:31313", Set(1, 2)) must notBeNull
      addNodeCount must be_==(1)
      nodeAdded.id must be_==(1)
      nodeAdded.url must be_==("localhost:31313")
      nodeAdded.available must be_==(false)
    }

    "removeNode should remove a node from ZooKeeperManager" in {
      clusterActor ! ClusterEvents.Connected(Set())
      waitFor(10.ms)

      cluster.removeNode(1)
      removeNodeCount must be_==(1)
      nodeRemovedId must be_==(1)
    }

    "markNodeAvailable should mark a node available in ZooKeeperManager" in {
      clusterActor ! ClusterEvents.Connected(Set())
      waitFor(10.ms)

      cluster.markNodeAvailable(11)
      markNodeAvailableCount must be_==(1)
      markNodeAvailableId must be_==(11)
    }

    "markNodeUnavailable should mark a node unavailable in ZooKeeperMonitor" in {
      clusterActor ! ClusterEvents.Connected(Set())
      waitFor(10.ms)

      cluster.markNodeUnavailable(111)
      markNodeUnavailableCount must be_==(1)
      markNodeUnavailableId must be_==(111)
    }

    "nodes should ask the ClusterNotificationManager for the current node list" in {
      clusterActor ! ClusterEvents.Connected(Set())
      waitFor(10.ms)

      val nodes = cluster.nodes
      nodes.size must be_==(3)
      nodes must containAll(currentNodes)
      getCurrentNodesCount must be_==(1)
    }

    "when handling nodeWithId" in {
      "return the node that matches the specified id" in {
        clusterActor ! ClusterEvents.Connected(currentNodes)
        waitFor(10.ms)

        cluster.nodeWithId(2) must beSome[Node].which(currentNodes must contain(_))
      }

      "return None if no matching id" in {
        clusterActor ! ClusterEvents.Connected(currentNodes)
        waitFor(10.ms)

        cluster.nodeWithId(4) must beNone
      }
    }

    "send an AddListener message to ClusterNotificationManager for addListener" in {
      val listener = new ClusterListener {
        var callCount = 0
        def handleClusterEvent(event: ClusterEvent): Unit = callCount += 1
      }

      cluster.addListener(listener) must notBeNull
      addListenerCount must be_==(1)

      currentListeners.head ! ClusterEvents.Disconnected
      listener.callCount must eventually(be_==(1))
    }

    "send a RemoveListener message to ClusterNotificationManager for removeListener" in {
      cluster.removeListener(ClusterListenerKey(1L))
      removeListenerCount must eventually(be_==(1))
    }

    "shutdown ClusterNotificationManager and ZooKeeperManager when shut down" in {
      cluster.shutdown
      cnmShutdownCount must eventually(be_==(1))
      zkmShutdownCount must be_==(1)
      cluster.isShutdown must beTrue
    }

    "handle a listener throwing an exception" in {
      val listener = new ClusterListener {
        var callCount = 0
        def handleClusterEvent(event: ClusterEvent) = {
          callCount += 1
          throw new Exception
        }
      }

      cluster.addListener(listener) must notBeNull
      addListenerCount must be_==(1)

      currentListeners.head ! ClusterEvents.NodesChanged(Set())
      currentListeners.head ! ClusterEvents.NodesChanged(Set())
      listener.callCount must eventually(be_==(2))
    }

    doAfterSpec {
      actors.Scheduler.shutdown
    }
  }
}
jhartman/norbert
cluster/src/test/scala/com/linkedin/norbert/cluster/ClusterClientSpec.scala
Scala
apache-2.0
10,880