Dataset schema (one row per source file):
  code        string  (5 – 1M chars)
  repo_name   string  (5 – 109 chars)
  path        string  (6 – 208 chars)
  language    string  (1 class)
  license     string  (15 classes)
  size        int64   (5 – 1M)
package com.tribbloids.spookystuff.session

import com.tribbloids.spookystuff.utils.lifespan.Cleanable
import com.tribbloids.spookystuff.utils.{CommonUtils, TreeThrowable}

import scala.util.Try

/**
 * Created by peng on 14/01/17.
 */
trait ConflictDetection extends Cleanable {

  def _resourceIDs: Map[String, Set[_]]

  def _resourceQualifier: String = this.getClass.getCanonicalName

  final def resourceIDs: Map[String, Set[Any]] = _resourceIDs.map { tuple =>
    val rawK = if (tuple._1.isEmpty) null else tuple._1
    val k = CommonUtils./:/(_resourceQualifier, rawK)
    val v = tuple._2.map(_.asInstanceOf[Any])
    k -> v
  }
}

object ConflictDetection {

  def conflicts: Seq[Try[Unit]] = {
    val allObj = Cleanable.getTyped[ConflictDetection]
    val allResourceIDs: Map[String, Seq[Any]] = allObj
      .map {
        _.resourceIDs.mapValues(_.toSeq)
      }
      .reduceOption { (m1, m2) =>
        val keys = (m1.keys ++ m2.keys).toSeq.distinct
        val kvs = keys.map { k =>
          val v = m1.getOrElse(k, Nil) ++ m2.getOrElse(k, Nil)
          k -> v
        }
        Map(kvs: _*)
      }
      .getOrElse(Map.empty)

    allResourceIDs.toSeq
      .flatMap { tuple =>
        tuple._2
          .groupBy(identity)
          .values
          .map { vs =>
            Try {
              assert(
                vs.size == 1, {
                  s"""
                     |${tuple._1}: resource ${vs.head} is used by ${vs.size} objects:
                     |${allObj
                       .filter(
                         v =>
                           v.resourceIDs
                             .getOrElse(tuple._1, Set.empty)
                             .contains(vs.head)
                       )
                       .map(v => s"$v -> ${vs.head}")
                       .mkString("\n")}
                   """.stripMargin
                }
              )
            }
          }
      }
  }

  def detectConflict(extra: Seq[Throwable] = Nil): Unit = {
    TreeThrowable.&&&(conflicts, extra = extra)
  }
}
repo_name: tribbloid/spookystuff
path: core/src/main/scala/com/tribbloids/spookystuff/session/ConflictDetection.scala
language: Scala
license: apache-2.0
size: 2,076
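A minimal usage sketch for the trait above (not from the source repo): a class that claims a TCP port as an exclusively-held resource, so that two live instances claiming the same port would be reported by ConflictDetection.detectConflict(). PortHolder is a hypothetical name, and the cleanImpl() override assumes that is the cleanup hook required by spookystuff's Cleanable.

import com.tribbloids.spookystuff.session.ConflictDetection

// Hypothetical holder; assumes Cleanable exposes a cleanImpl() hook.
class PortHolder(port: Int) extends ConflictDetection {
  // One resource namespace ("ports") containing the single claimed port.
  override def _resourceIDs: Map[String, Set[_]] = Map("ports" -> Set(port))
  override protected def cleanImpl(): Unit = () // release the port here
}

// If two live PortHolders share a port, one of the Trys returned by
// ConflictDetection.conflicts fails, and detectConflict() throws:
// ConflictDetection.detectConflict()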
package play.api.data

import scala.language.existentials

import format._
import validation._

/**
 * Helper to manage HTML form description, submission and validation.
 *
 * For example, a form handling a `User` case class submission:
 * {{{
 * import play.api.data._
 * import play.api.data.Forms._
 * import play.api.data.format.Formats._
 *
 * val userForm = Form(
 *   mapping(
 *     "name" -> of[String],
 *     "age" -> of[Int],
 *     "email" -> of[String]
 *   )(User.apply)(User.unapply)
 * )
 * }}}
 *
 * @tparam T the type managed by this form
 * @param mapping the form mapping, which describes all form fields
 * @param data the current form data, used to display the form
 * @param errors the collection of errors associated with this form
 * @param value a concrete value of type `T` if the form submission was successful
 */
case class Form[T](mapping: Mapping[T], data: Map[String, String], errors: Seq[FormError], value: Option[T]) {

  /**
   * Constraints associated with this form, indexed by field name.
   */
  val constraints: Map[String, Seq[(String, Seq[Any])]] = mapping.mappings.map { m =>
    m.key -> m.constraints.collect { case Constraint(Some(name), args) => name -> args }
  }.filterNot(_._2.isEmpty).toMap

  /**
   * Formats associated to this form, indexed by field name.
   */
  val formats: Map[String, (String, Seq[Any])] = mapping.mappings.map { m =>
    m.key -> m.format
  }.collect { case (k, Some(f)) => k -> f }.toMap

  /**
   * Binds data to this form, i.e. handles form submission.
   *
   * @param data the data to submit
   * @return a copy of this form, filled with the new data
   */
  def bind(data: Map[String, String]): Form[T] = mapping.bind(data).fold(
    newErrors => this.copy(data = data, errors = errors ++ newErrors, value = None),
    value => this.copy(data = data, errors = errors, value = Some(value)))

  /**
   * Binds data to this form, i.e. handles form submission.
   *
   * @param data Json data to submit
   * @return a copy of this form, filled with the new data
   */
  def bind(data: play.api.libs.json.JsValue): Form[T] = bind(FormUtils.fromJson(js = data))

  /**
   * Binds request data to this form, i.e. handles form submission.
   *
   * @return a copy of this form filled with the new data
   */
  def bindFromRequest()(implicit request: play.api.mvc.Request[_]): Form[T] = {
    bindFromRequest {
      (request.body match {
        case body: play.api.mvc.AnyContent if body.asFormUrlEncoded.isDefined => body.asFormUrlEncoded.get
        case body: play.api.mvc.AnyContent if body.asMultipartFormData.isDefined => body.asMultipartFormData.get.asFormUrlEncoded
        case body: play.api.mvc.AnyContent if body.asJson.isDefined => FormUtils.fromJson(js = body.asJson.get).mapValues(Seq(_))
        case body: Map[_, _] => body.asInstanceOf[Map[String, Seq[String]]]
        case body: play.api.mvc.MultipartFormData[_] => body.asFormUrlEncoded
        case body: play.api.libs.json.JsValue => FormUtils.fromJson(js = body).mapValues(Seq(_))
        case _ => Map.empty[String, Seq[String]]
      }) ++ request.queryString
    }
  }

  def bindFromRequest(data: Map[String, Seq[String]]): Form[T] = {
    bind {
      data.foldLeft(Map.empty[String, String]) {
        case (s, (key, values)) if key.endsWith("[]") => s ++ values.zipWithIndex.map { case (v, i) => (key.dropRight(2) + "[" + i + "]") -> v }
        case (s, (key, values)) => s + (key -> values.headOption.getOrElse(""))
      }
    }
  }

  /**
   * Fills this form with an existing value, used for edit forms.
   *
   * @param value an existing value of type `T`, used to fill this form
   * @return a copy of this form filled with the new data
   */
  def fill(value: T): Form[T] = {
    val result = mapping.unbind(value)
    this.copy(data = result._1, value = Some(value))
  }

  /**
   * Fills this form with an existing value, and performs a validation.
   *
   * @param value an existing value of type `T`, used to fill this form
   * @return a copy of this form filled with the new data
   */
  def fillAndValidate(value: T): Form[T] = {
    val result = mapping.unbind(value)
    this.copy(data = result._1, errors = result._2, value = Some(value))
  }

  /**
   * Handles form results. Either the form has errors, or the submission was a success and a
   * concrete value is available.
   *
   * For example:
   * {{{
   * anyForm.bindFromRequest().fold(
   *   f => redisplayForm(f),
   *   t => handleValidFormSubmission(t)
   * )
   * }}}
   *
   * @tparam R common result type
   * @param hasErrors a function to handle forms with errors
   * @param success a function to handle form submission success
   * @return a result `R`.
   */
  def fold[R](hasErrors: Form[T] => R, success: T => R): R = value match {
    case Some(v) if errors.isEmpty => success(v)
    case _ => hasErrors(this)
  }

  /**
   * Retrieves a field.
   *
   * For example:
   * {{{
   * val usernameField = userForm("username")
   * }}}
   *
   * @param key the field name
   * @return the field, returned even if the field does not exist
   */
  def apply(key: String): Field = Field(
    this,
    key,
    constraints.get(key).getOrElse(Nil),
    formats.get(key),
    errors.collect { case e if e.key == key => e },
    data.get(key))

  /**
   * Retrieves the first global error, if it exists, i.e. an error without any key.
   *
   * @return an error
   */
  def globalError: Option[FormError] = globalErrors.headOption

  /**
   * Retrieves all global errors, i.e. errors without a key.
   *
   * @return all global errors
   */
  def globalErrors: Seq[FormError] = errors.filter(_.key.isEmpty)

  /**
   * Applies a function for a field.
   *
   * For example:
   * {{{
   * userForm.forField("username") { field =>
   *   <input type="text" name={field.name} value={field.value.getOrElse("")} />
   * }
   * }}}
   *
   * @tparam R result type
   * @param key field name
   * @param handler field handler (transform the field to `R`)
   */
  def forField[R](key: String)(handler: Field => R): R = handler(this(key))

  /**
   * Returns `true` if there is an error related to this form.
   */
  def hasErrors: Boolean = !errors.isEmpty

  /**
   * Retrieve the first error for this key.
   *
   * @param key field name.
   */
  def error(key: String): Option[FormError] = errors.find(_.key == key)

  /**
   * Retrieve all errors for this key.
   *
   * @param key field name.
   */
  def errors(key: String): Seq[FormError] = errors.filter(_.key == key)

  /**
   * Returns `true` if there is a global error related to this form.
   */
  def hasGlobalErrors: Boolean = !globalErrors.isEmpty

  /**
   * Returns the concrete value, if the submission was a success.
   *
   * Note that this method fails with an Exception if this form has errors.
   */
  def get: T = value.get

  /**
   * Returns the form errors serialized as Json.
   */
  def errorsAsJson(implicit lang: play.api.i18n.Lang): play.api.libs.json.JsValue = {
    import play.api.libs.json._
    Json.toJson(
      errors.groupBy(_.key).mapValues { errors =>
        errors.map(e => play.api.i18n.Messages(e.message, e.args: _*))
      }
    )
  }

  /**
   * Adds an error to this form
   * @param error Error to add
   * @return a copy of this form with the added error
   */
  def withError(error: FormError): Form[T] = this.copy(errors = errors :+ error, value = None)

  /**
   * Convenient overloaded method adding an error to this form
   * @param key Key of the field having the error
   * @param message Error message
   * @param args Error message arguments
   * @return a copy of this form with the added error
   */
  def withError(key: String, message: String, args: Any*): Form[T] = withError(FormError(key, message, args))

  /**
   * Adds a global error to this form
   * @param message Error message
   * @param args Error message arguments
   * @return a copy of this form with the added global error
   */
  def withGlobalError(message: String, args: Any*): Form[T] = withError(FormError("", message, args))

  /**
   * Discards this form’s errors
   * @return a copy of this form without errors
   */
  def discardingErrors: Form[T] = this.copy(errors = Seq.empty)
}

/**
 * A form field.
 *
 * @param name the field name
 * @param constraints the constraints associated with the field
 * @param format the format expected for this field
 * @param errors the errors associated to this field
 * @param value the field value, if any
 */
case class Field(private val form: Form[_], name: String, constraints: Seq[(String, Seq[Any])], format: Option[(String, Seq[Any])], errors: Seq[FormError], value: Option[String]) {

  /**
   * The field ID - the same as the field name but with '.' replaced by '_'.
   */
  lazy val id: String = name.replace('.', '_').replace('[', '_').replace(']', '_')

  /**
   * Returns the first error associated with this field, if it exists.
   *
   * @return an error
   */
  lazy val error: Option[FormError] = errors.headOption

  /**
   * Check if this field has errors.
   */
  lazy val hasErrors: Boolean = !errors.isEmpty

  /**
   * Retrieve a field from the same form, using a key relative to this field key.
   *
   * @param key Relative key.
   */
  def apply(key: String): Field = {
    form(Option(name).filterNot(_.isEmpty).map(_ + (if (key(0) == '[') "" else ".")).getOrElse("") + key)
  }

  /**
   * Retrieve available indexes defined for this field (if this field is repeated).
   */
  lazy val indexes: Seq[Int] = {
    RepeatedMapping.indexes(name, form.data)
  }
}

/**
 * Provides a set of operations for creating `Form` values.
 */
object Form {

  /**
   * Creates a new form from a mapping.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import play.api.data.Forms._
   * import play.api.data.format.Formats._
   *
   * val userForm = Form(
   *   tuple(
   *     "name" -> of[String],
   *     "age" -> of[Int],
   *     "email" -> of[String]
   *   )
   * )
   * }}}
   *
   * @param mapping the form mapping
   * @return a form definition
   */
  def apply[T](mapping: Mapping[T]): Form[T] = Form(mapping, Map.empty, Nil, None)

  /**
   * Creates a new form from a mapping, with a root key.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import play.api.data.Forms._
   * import play.api.data.format.Formats._
   *
   * val userForm = Form(
   *   "user" -> tuple(
   *     "name" -> of[String],
   *     "age" -> of[Int],
   *     "email" -> of[String]
   *   )
   * )
   * }}}
   *
   * @param mapping the root key, form mapping association
   * @return a form definition
   */
  def apply[T](mapping: (String, Mapping[T])): Form[T] = Form(mapping._2.withPrefix(mapping._1), Map.empty, Nil, None)
}

private[data] object FormUtils {

  import play.api.libs.json._

  def fromJson(prefix: String = "", js: JsValue): Map[String, String] = js match {
    case JsObject(fields) => {
      fields.map { case (key, value) =>
        fromJson(Option(prefix).filterNot(_.isEmpty).map(_ + ".").getOrElse("") + key, value)
      }.foldLeft(Map.empty[String, String])(_ ++ _)
    }
    case JsArray(values) => {
      values.zipWithIndex.map { case (value, i) =>
        fromJson(prefix + "[" + i + "]", value)
      }.foldLeft(Map.empty[String, String])(_ ++ _)
    }
    case JsNull => Map.empty
    case JsUndefined() => Map.empty
    case JsBoolean(value) => Map(prefix -> value.toString)
    case JsNumber(value) => Map(prefix -> value.toString)
    case JsString(value) => Map(prefix -> value.toString)
  }
}

/**
 * A form error.
 *
 * @param key The error key (should be associated with a field using the same key).
 * @param message The form message (often a simple message key needing to be translated).
 * @param args Arguments used to format the message.
 */
case class FormError(key: String, message: String, args: Seq[Any] = Nil) {

  /**
   * Copy this error with a new Message.
   *
   * @param message The new message.
   */
  def withMessage(message: String): FormError = FormError(key, message)
}

/**
 * A mapping is a two-way binder to handle a form field.
 */
trait Mapping[T] { self =>

  /**
   * The field key.
   */
  val key: String

  /**
   * Sub-mappings (these can be seen as sub-keys).
   */
  val mappings: Seq[Mapping[_]]

  /**
   * The Format expected for this field, if it exists.
   */
  val format: Option[(String, Seq[Any])] = None

  /**
   * The constraints associated with this field.
   */
  val constraints: Seq[Constraint[T]]

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param data the submitted data
   * @return either a concrete value of type `T` or a set of errors, if the binding failed
   */
  def bind(data: Map[String, String]): Either[Seq[FormError], T]

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param value the value to unbind
   * @return either the plain data or a set of errors, if the unbinding failed
   */
  def unbind(value: T): (Map[String, String], Seq[FormError])

  /**
   * Constructs a new Mapping based on this one, adding a prefix to the key.
   *
   * @param prefix the prefix to add to the key
   * @return the same mapping, with only the key changed
   */
  def withPrefix(prefix: String): Mapping[T]

  /**
   * Constructs a new Mapping based on this one, by adding new constraints.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying(required))
   * }}}
   *
   * @param constraints the constraints to add
   * @return the new mapping
   */
  def verifying(constraints: Constraint[T]*): Mapping[T]

  /**
   * Constructs a new Mapping based on this one, by adding a new ad-hoc constraint.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying {_.grouped(2).size == 5})
   * }}}
   *
   * @param constraint a function describing the constraint that returns `false` on failure
   * @return the new mapping
   */
  def verifying(constraint: (T => Boolean)): Mapping[T] = verifying("error.unknown", constraint)

  /**
   * Constructs a new Mapping based on this one, by adding a new ad-hoc constraint.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying("Bad phone number", {_.grouped(2).size == 5}))
   * }}}
   *
   * @param error The error message used if the constraint fails
   * @param constraint a function describing the constraint that returns `false` on failure
   * @return the new mapping
   */
  def verifying(error: => String, constraint: (T => Boolean)): Mapping[T] = {
    verifying(Constraint { t: T =>
      if (constraint(t)) Valid else Invalid(Seq(ValidationError(error)))
    })
  }

  /**
   * Transform this Mapping[T] to a Mapping[B].
   *
   * @tparam B The type of the new mapping.
   * @param f1 Transform value of T to a value of B
   * @param f2 Transform value of B to a value of T
   */
  def transform[B](f1: T => B, f2: B => T): Mapping[B] = WrappedMapping(this, f1, f2)

  // Internal utilities

  protected def addPrefix(prefix: String) = {
    Option(prefix).filterNot(_.isEmpty).map(p => p + Option(key).filterNot(_.isEmpty).map("." + _).getOrElse(""))
  }

  protected def applyConstraints(t: T): Either[Seq[FormError], T] = {
    Right(t).right.flatMap { v =>
      Option(collectErrors(v)).filterNot(_.isEmpty).toLeft(v)
    }
  }

  protected def collectErrors(t: T): Seq[FormError] = {
    constraints.map(_(t)).collect {
      case Invalid(errors) => errors.toSeq
    }.flatten.map(ve => FormError(key, ve.message, ve.args))
  }
}

/**
 * A mapping wrapping another existing mapping with transformation functions.
 *
 * @param wrapped Existing wrapped mapping
 * @param f1 Transformation function from A to B
 * @param f2 Transformation function from B to A
 * @param additionalConstraints Additional constraints of type B
 */
case class WrappedMapping[A, B](wrapped: Mapping[A], f1: A => B, f2: B => A, val additionalConstraints: Seq[Constraint[B]] = Nil) extends Mapping[B] {

  /**
   * The field key.
   */
  val key = wrapped.key

  /**
   * Sub-mappings (these can be seen as sub-keys).
   */
  val mappings = wrapped.mappings

  /**
   * The Format expected for this field, if it exists.
   */
  override val format = wrapped.format

  /**
   * The constraints associated with this field.
   */
  val constraints: Seq[Constraint[B]] = wrapped.constraints.map { constraintOfT =>
    Constraint[B](constraintOfT.name, constraintOfT.args) { b => constraintOfT(f2(b)) }
  } ++ additionalConstraints

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param data the submitted data
   * @return either a concrete value of type `B` or a set of errors, if the binding failed
   */
  def bind(data: Map[String, String]): Either[Seq[FormError], B] = {
    wrapped.bind(data).right.map(t => f1(t)).right.flatMap(applyConstraints)
  }

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param value the value to unbind
   * @return either the plain data or a set of errors, if the unbinding failed
   */
  def unbind(value: B): (Map[String, String], Seq[FormError]) = {
    (wrapped.unbind(f2(value))._1, collectErrors(value))
  }

  /**
   * Constructs a new Mapping based on this one, adding a prefix to the key.
   *
   * @param prefix the prefix to add to the key
   * @return the same mapping, with only the key changed
   */
  def withPrefix(prefix: String): Mapping[B] = {
    copy(wrapped = wrapped.withPrefix(prefix))
  }

  /**
   * Constructs a new Mapping based on this one, by adding new constraints.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying(required))
   * }}}
   *
   * @param constraints the constraints to add
   * @return the new mapping
   */
  def verifying(constraints: Constraint[B]*): Mapping[B] = copy(additionalConstraints = additionalConstraints ++ constraints)
}

/**
 * Provides a set of operations related to `RepeatedMapping` values.
 */
object RepeatedMapping {

  /**
   * Computes the available indexes for the given key in this set of data.
   */
  def indexes(key: String, data: Map[String, String]): Seq[Int] = {
    val KeyPattern = ("^" + java.util.regex.Pattern.quote(key) + """\[(\d+)\].*$""").r
    data.toSeq.collect { case (KeyPattern(index), _) => index.toInt }.sorted.distinct
  }
}

/**
 * A mapping for repeated elements.
 *
 * @param wrapped The wrapped mapping
 */
case class RepeatedMapping[T](wrapped: Mapping[T], val key: String = "", val constraints: Seq[Constraint[List[T]]] = Nil) extends Mapping[List[T]] {

  /**
   * The Format expected for this field, if it exists.
   */
  override val format: Option[(String, Seq[Any])] = wrapped.format

  /**
   * Constructs a new Mapping based on this one, by adding new constraints.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying(required))
   * }}}
   *
   * @param constraints the constraints to add
   * @return the new mapping
   */
  def verifying(addConstraints: Constraint[List[T]]*): Mapping[List[T]] = {
    this.copy(constraints = constraints ++ addConstraints.toSeq)
  }

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param data the submitted data
   * @return either a concrete value of type `List[T]` or a set of errors, if the binding failed
   */
  def bind(data: Map[String, String]): Either[Seq[FormError], List[T]] = {
    val allErrorsOrItems: Seq[Either[Seq[FormError], T]] = RepeatedMapping.indexes(key, data).map(i => wrapped.withPrefix(key + "[" + i + "]").bind(data))
    if (allErrorsOrItems.forall(_.isRight)) {
      Right(allErrorsOrItems.map(_.right.get).toList).right.flatMap(applyConstraints)
    } else {
      Left(allErrorsOrItems.collect { case Left(errors) => errors }.flatten)
    }
  }

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param value the value to unbind
   * @return either the plain data or a set of errors, if the unbinding failed
   */
  def unbind(value: List[T]): (Map[String, String], Seq[FormError]) = {
    val (datas, errors) = value.zipWithIndex.map { case (t, i) => wrapped.withPrefix(key + "[" + i + "]").unbind(t) }.unzip
    (datas.foldLeft(Map.empty[String, String])(_ ++ _), errors.flatten ++ collectErrors(value))
  }

  /**
   * Constructs a new Mapping based on this one, adding a prefix to the key.
   *
   * @param prefix the prefix to add to the key
   * @return the same mapping, with only the key changed
   */
  def withPrefix(prefix: String): Mapping[List[T]] = {
    addPrefix(prefix).map(newKey => this.copy(key = newKey)).getOrElse(this)
  }

  /**
   * Sub-mappings (these can be seen as sub-keys).
   */
  val mappings: Seq[Mapping[_]] = wrapped.mappings
}

/**
 * A mapping for optional elements
 *
 * @param wrapped the wrapped mapping
 */
case class OptionalMapping[T](wrapped: Mapping[T], val constraints: Seq[Constraint[Option[T]]] = Nil) extends Mapping[Option[T]] {

  override val format: Option[(String, Seq[Any])] = wrapped.format

  /**
   * The field key.
   */
  val key = wrapped.key

  /**
   * Constructs a new Mapping based on this one, by adding new constraints.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying(required))
   * }}}
   *
   * @param constraints the constraints to add
   * @return the new mapping
   */
  def verifying(addConstraints: Constraint[Option[T]]*): Mapping[Option[T]] = {
    this.copy(constraints = constraints ++ addConstraints.toSeq)
  }

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param data the submitted data
   * @return either a concrete value of type `T` or a set of errors, if the binding failed
   */
  def bind(data: Map[String, String]): Either[Seq[FormError], Option[T]] = {
    data.keys.filter(p => p == key || p.startsWith(key + ".") || p.startsWith(key + "["))
      .map(k => data.get(k).filterNot(_.isEmpty))
      .collect { case Some(v) => v }
      .headOption.map { _ =>
        wrapped.bind(data).right.map(Some(_))
      }.getOrElse {
        Right(None)
      }.right.flatMap(applyConstraints)
  }

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param value The value to unbind.
   * @return Either the plain data or a set of errors, if the unbinding failed.
   */
  def unbind(value: Option[T]): (Map[String, String], Seq[FormError]) = {
    val errors = collectErrors(value)
    value.map(wrapped.unbind(_)).map(r => r._1 -> (r._2 ++ errors)).getOrElse(Map.empty -> errors)
  }

  /**
   * Constructs a new Mapping based on this one, adding a prefix to the key.
   *
   * @param prefix the prefix to add to the key
   * @return the same mapping, with only the key changed
   */
  def withPrefix(prefix: String): Mapping[Option[T]] = {
    copy(wrapped = wrapped.withPrefix(prefix))
  }

  /** Sub-mappings (these can be seen as sub-keys). */
  val mappings: Seq[Mapping[_]] = wrapped.mappings
}

/**
 * A mapping for a single field.
 *
 * @param key the field key
 * @param constraints the constraints associated with this field.
 */
case class FieldMapping[T](val key: String = "", val constraints: Seq[Constraint[T]] = Nil)(implicit val binder: Formatter[T]) extends Mapping[T] {

  /**
   * The Format expected for this field, if it exists.
   */
  override val format: Option[(String, Seq[Any])] = binder.format

  /**
   * Constructs a new Mapping based on this one, by adding new constraints.
   *
   * For example:
   * {{{
   * import play.api.data._
   * import validation.Constraints._
   *
   * Form("phonenumber" -> text.verifying(required))
   * }}}
   *
   * @param constraints the constraints to add
   * @return the new mapping
   */
  def verifying(addConstraints: Constraint[T]*): Mapping[T] = {
    this.copy(constraints = constraints ++ addConstraints.toSeq)
  }

  /**
   * Changes the binder used to handle this field.
   *
   * @param binder the new binder to use
   * @return the same mapping with a new binder
   */
  def as(binder: Formatter[T]): Mapping[T] = {
    this.copy()(binder)
  }

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param data the submitted data
   * @return either a concrete value of type `T` or a set of errors, if binding failed
   */
  def bind(data: Map[String, String]): Either[Seq[FormError], T] = {
    binder.bind(key, data).right.flatMap { applyConstraints(_) }
  }

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param value the value to unbind
   * @return either the plain data or a set of errors, if unbinding failed
   */
  def unbind(value: T): (Map[String, String], Seq[FormError]) = {
    binder.unbind(key, value) -> collectErrors(value)
  }

  /**
   * Constructs a new Mapping based on this one, adding a prefix to the key.
   *
   * @param prefix the prefix to add to the key
   * @return the same mapping, with only the key changed
   */
  def withPrefix(prefix: String): Mapping[T] = {
    addPrefix(prefix).map(newKey => this.copy(key = newKey)).getOrElse(this)
  }

  /** Sub-mappings (these can be seen as sub-keys). */
  val mappings: Seq[Mapping[_]] = Seq(this)
}

/**
 * Common helper methods for all object mappings - mappings including several fields.
 */
trait ObjectMapping {

  /**
   * Merges the result of two bindings.
   *
   * @see bind()
   */
  def merge2(a: Either[Seq[FormError], Seq[Any]], b: Either[Seq[FormError], Seq[Any]]): Either[Seq[FormError], Seq[Any]] = (a, b) match {
    case (Left(errorsA), Left(errorsB)) => Left(errorsA ++ errorsB)
    case (Left(errorsA), Right(_)) => Left(errorsA)
    case (Right(_), Left(errorsB)) => Left(errorsB)
    case (Right(a), Right(b)) => Right(a ++ b)
  }

  /**
   * Merges the result of multiple bindings.
   *
   * @see bind()
   */
  def merge(results: Either[Seq[FormError], Any]*): Either[Seq[FormError], Seq[Any]] = {
    val all: Seq[Either[Seq[FormError], Seq[Any]]] = results.map(_.right.map(Seq(_)))
    all.fold(Right(Nil)) { (s, i) => merge2(s, i) }
  }
}

/**
 * Represents an object binding (i.e. a binding for several fields).
 *
 * This is used for objects with one field. Other versions exist, e.g. `ObjectMapping2`, `ObjectMapping3`, etc.
 *
 * @tparam R the complex object type
 * @tparam A1 the first field type
 * @param apply a constructor function that creates an instance of `R` using field `A1`
 * @param f1 a mapping for field `A1`
 * @param constraints constraints associated with this mapping
 */
case class ObjectMapping1[R, A1](apply: Function1[A1, R], unapply: Function1[R, Option[(A1)]], f1: (String, Mapping[A1]), val key: String = "", val constraints: Seq[Constraint[R]] = Nil) extends Mapping[R] with ObjectMapping {

  val field1 = f1._2.withPrefix(f1._1).withPrefix(key)

  def bind(data: Map[String, String]) = {
    merge(field1.bind(data)) match {
      case Left(errors) => Left(errors)
      case Right(values) => {
        applyConstraints(apply(
          values(0).asInstanceOf[A1]))
      }
    }
  }

  def unbind(value: R) = {
    unapply(value).map { fields =>
      val (v1) = fields
      val a1 = field1.unbind(v1)
      (a1._1) -> (a1._2)
    }.getOrElse(Map.empty -> Seq(FormError(key, "unbind.failed")))
  }

  def withPrefix(prefix: String) = addPrefix(prefix).map(newKey => this.copy(key = newKey)).getOrElse(this)

  def verifying(addConstraints: Constraint[R]*) = {
    this.copy(constraints = constraints ++ addConstraints.toSeq)
  }

  val mappings = Seq(this) ++ field1.mappings
}
repo_name: vangav/vos_backend
path: play-2.2.6/framework/src/play/src/main/scala/play/api/data/Form.scala
language: Scala
license: mit
size: 28,025
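A short, self-contained sketch of the bind/fold cycle defined above, following the mapping example in the file's own scaladoc; the User case class and sample data are illustrative only.

import play.api.data._
import play.api.data.Forms._
import play.api.data.format.Formats._

case class User(name: String, age: Int, email: String)

val userForm = Form(
  mapping(
    "name" -> of[String],
    "age" -> of[Int],
    "email" -> of[String]
  )(User.apply)(User.unapply)
)

// A successful bind carries a value; a failed bind accumulates FormErrors.
userForm.bind(Map("name" -> "Alice", "age" -> "42", "email" -> "a@b.c")).fold(
  formWithErrors => println(formWithErrors.errors), // e.g. "age" -> "x" fails Int binding
  user => println(s"bound: $user")
)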
/***********************************************************************
 * Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.accumulo.data

import java.util

import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.geotools.data.DataStoreFinder
import org.geotools.data.simple.SimpleFeatureSource
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.index.{RecordIndex, XZ2Index}
import org.locationtech.geomesa.accumulo.iterators.TestData
import org.locationtech.geomesa.accumulo.iterators.TestData._
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.opengis.filter._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

import scala.collection.JavaConversions._

@RunWith(classOf[JUnitRunner])
class TableSharingTest extends Specification with LazyLogging {

  sequential

  val tableName = "sharingTest"

  val ds = {
    import AccumuloDataStoreParams._
    DataStoreFinder.getDataStore(Map(
      InstanceIdParam.key -> "mycloud",
      ZookeepersParam.key -> "zoo1:2181,zoo2:2181,zoo3:2181",
      UserParam.key -> "myuser",
      PasswordParam.key -> "mypassword",
      AuthsParam.key -> "A,B,C",
      CatalogParam.key -> tableName,
      MockParam.key -> "true")).asInstanceOf[AccumuloDataStore]
  }

  // Check existence of tables?
  val mockInstance = new MockInstance("mycloud")
  val c = mockInstance.getConnector("myuser", new PasswordToken("mypassword".getBytes("UTF8")))

  // Three datasets. Each with a common field: attr2?
  val sft1 = TestData.getFeatureType("1", tableSharing = true)
  val sft2 = TestData.getFeatureType("2", tableSharing = true)
  val sft3 = TestData.getFeatureType("3", tableSharing = false)

  // Load up data
  val mediumData1 = mediumData.map(createSF(_, sft1))
  val mediumData2 = mediumData.map(createSF(_, sft2))
  val mediumData3 = mediumData.map(createSF(_, sft3))

  val fs1 = getFeatureStore(ds, sft1, mediumData1)
  val fs2 = getFeatureStore(ds, sft2, mediumData2)
  val fs3 = getFeatureStore(ds, sft3, mediumData3)

  // TODO: Add tests to check if the correct tables exist and if the metadata is all correct.

  // Check the sft's indexschema
  val retrievedSFT1 = ds.getSchema(sft1.getTypeName)

  val list2: util.SortedSet[String] = c.tableOperations().list

  // At least three queries: st, attr, id.
  def filterCount(f: Filter) = mediumData1.count(f.evaluate)
  // note: size returns an estimated amount, instead we need to actually count the features
  def queryCount(f: Filter, fs: SimpleFeatureSource) = SelfClosingIterator(fs.getFeatures(f)).length

  val id = "IN(100001, 100011)"
  val st = "INTERSECTS(geom, POLYGON ((41 28, 42 28, 42 29, 41 29, 41 28)))"
  val at = "attr2 = '2nd100001'"

  // This function compares the number of returned results.
  def compare(fs: String, step: Int, featureStore2: SimpleFeatureSource = fs2) = {
    val f = ECQL.toFilter(fs)
    val fc = filterCount(f)
    val q1 = queryCount(f, fs1)
    val q3 = queryCount(f, fs3)

    step match {
      case 1 => check(q3)
      case 2 =>
      case 3 => check(0) // Feature source #2 should be empty
      case 4 => check(q3)
    }

    // Checks feature source #2's query count against the input.
    def check(count: Int) = {
      val q2 = queryCount(f, featureStore2)
      s"fs2 must get $count results from filter $fs" >> {
        q2 mustEqual count
      }
    }

    s"fc and fs1 get the same results from filter $fs" >> { fc mustEqual q1 }
    s"fs1 and fs3 get the same results from filter $fs" >> { q1 mustEqual q3 }
  }

  "all three queries" should {
    "work for all three features (after setup) " >> {
      compare(id, 1)
      compare(st, 1)
      compare(at, 1)
    }
  }

  // Delete one shared table feature to ensure that deleteSchema works.
  s"Removing ${sft2.getTypeName}" should {
    val sft2Scanner = ds.connector.createScanner(XZ2Index.getTableName(sft2.getTypeName, ds), ds.auths)
    val sft2RecordScanner = ds.connector.createScanner(RecordIndex.getTableName(sft2.getTypeName, ds), ds.auths)

    ds.removeSchema(sft2.getTypeName)

    // TODO: Add tests to measure what tables exist, etc.
    // TODO: test ds.getNames.

    // TODO: Observe that this kind of collection is empty.
    sft2Scanner.setRange(new org.apache.accumulo.core.data.Range())
    sft2Scanner.iterator
      .map(e => s"ST Key: ${e.getKey}")
      .filter(_.contains("feature2"))
      .take(10)
      .foreach(s => logger.debug(s))

    sft2RecordScanner.setRange(new org.apache.accumulo.core.data.Range())
    sft2RecordScanner.iterator.take(10).foreach { e =>
      logger.debug(s"Record Key: ${e.getKey}")
    }

    s"result in FeatureStore named ${sft2.getTypeName} being gone" >> {
      ds.getNames.contains(sft2.getTypeName) must beFalse
    }
  }

  // Query again.
  "all three queries" should {
    "work for all three features (after delete) " >> {
      compare(id, 2)
      compare(st, 2)
      compare(at, 2)
    }
  }

  // Query again after recreating just the SFT for feature source 2.
  "all three queries" should {
    ds.createSchema(sft2)
    val newfs2 = ds.getFeatureSource(sft2.getTypeName)
    "work for all three features (after recreating the schema for SFT2) " >> {
      compare(id, 3, newfs2)
      compare(st, 3, newfs2)
      compare(at, 3, newfs2)
    }
  }

  // Query again after reingesting a feature source #2.
  "all three queries" should {
    val fs2ReIngested = getFeatureStore(ds, sft2, mediumData2)
    "work for all three features (after re-ingest) " >> {
      compare(id, 4, featureStore2 = fs2ReIngested)
      compare(st, 4, featureStore2 = fs2ReIngested)
      compare(at, 4, featureStore2 = fs2ReIngested)
    }
  }
}
repo_name: ronq/geomesa
path: geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/TableSharingTest.scala
language: Scala
license: apache-2.0
size: 6,253
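The queryCount helper in the test above exists because getFeatures(...).size() may return an estimate rather than an exact count. The same exact-count idiom in isolation, as a sketch (exactCount is a hypothetical helper name):

import org.geotools.data.simple.SimpleFeatureSource
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.utils.collection.SelfClosingIterator

// Count features exactly by iterating (and closing) the result set.
def exactCount(fs: SimpleFeatureSource, ecql: String): Int =
  SelfClosingIterator(fs.getFeatures(ECQL.toFilter(ecql))).length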
package utils.testhelpers

import models.storage.event.EventType
import models.storage.event.EventTypeRegistry.TopLevelEvents._
import models.storage.event.control.Control
import models.storage.event.control.ControlAttributes.{
  ControlAlcohol,
  ControlCleaning,
  ControlPest,
  ControlTemperature
}
import models.storage.event.envreq.EnvRequirement
import models.storage.event.move.{MoveNode, MoveObject}
import models.storage.event.observation.Observation
import models.storage.event.observation.ObservationAttributes._
import models.storage.{FromToDouble, Interval, LifeCycle}
import no.uio.musit.models.{ObjectTypes, ObjectUUID, StorageNodeId}
import org.joda.time.DateTime

trait EventGenerators { self: BaseDummyData =>

  val defaultObjectUUID = ObjectUUID.generate()
  val defaultNodeId = StorageNodeId.generate()
  val firstNodeId = StorageNodeId.generate()
  val secondNodeId = StorageNodeId.generate()

  def createMoveObject(
      objectId: Option[ObjectUUID] = Some(defaultObjectUUID),
      from: Option[StorageNodeId],
      to: StorageNodeId
  ): MoveObject = {
    MoveObject(
      id = None,
      doneDate = DateTime.now.minusDays(1),
      registeredBy = Some(defaultActorId),
      registeredDate = Some(DateTime.now),
      doneBy = Some(defaultActorId),
      affectedThing = objectId,
      eventType = EventType.fromEventTypeId(MoveObjectType.id),
      objectType = ObjectTypes.CollectionObject,
      from = from,
      to = to
    )
  }

  def createMoveNode(
      nodeId: Option[StorageNodeId] = Some(defaultNodeId),
      from: Option[StorageNodeId],
      to: StorageNodeId
  ): MoveNode = {
    MoveNode(
      id = None,
      doneDate = DateTime.now.minusDays(1),
      registeredBy = Some(defaultActorId),
      registeredDate = Some(DateTime.now),
      doneBy = Some(defaultActorId),
      affectedThing = nodeId,
      eventType = EventType.fromEventTypeId(MoveNodeType.id),
      from = from,
      to = to
    )
  }

  def createEnvRequirement(affectedNodeId: Option[StorageNodeId] = None) = {
    EnvRequirement(
      id = None,
      doneDate = DateTime.now.minusDays(1),
      note = Some("This is an envreq note"),
      registeredBy = Some(defaultActorId),
      registeredDate = Some(DateTime.now),
      doneBy = Some(defaultActorId),
      affectedThing = affectedNodeId,
      eventType = EventType.fromEventTypeId(EnvRequirementEventType.id),
      temperature = Some(Interval(20, Some(5))),
      airHumidity = Some(Interval(60.0, Some(10))),
      hypoxicAir = Some(Interval(0, Some(15))),
      cleaning = Some("keep it clean, dude"),
      light = Some("dim")
    )
  }

  def createControl(affectedNodeId: Option[StorageNodeId] = None) = {
    Control(
      id = None,
      doneDate = DateTime.now.minusDays(1),
      registeredBy = Some(defaultActorId),
      registeredDate = Some(DateTime.now),
      doneBy = Some(defaultActorId),
      affectedThing = affectedNodeId,
      eventType = EventType.fromEventTypeId(ControlEventType.id),
      temperature = Some(createTemperatureControl()),
      alcohol = Some(createAlcoholControl()),
      cleaning = Some(createCleaningControl(ok = true)),
      pest = Some(createPestControl())
    )
  }

  def createObservation(affectedNodeId: Option[StorageNodeId] = None) = {
    Observation(
      id = None,
      doneDate = DateTime.now.minusDays(1),
      registeredBy = Some(defaultActorId),
      registeredDate = Some(DateTime.now),
      doneBy = Some(defaultActorId),
      affectedThing = affectedNodeId,
      eventType = EventType.fromEventTypeId(ObservationEventType.id),
      alcohol = Some(createAlcoholObservation),
      cleaning = Some(createCleaningObservation),
      gas = Some(createGasObservation),
      hypoxicAir = Some(createHypoxicObservation),
      lightingCondition = Some(createLightingObservation),
      mold = Some(createMoldObservation),
      pest = Some(createPestObservation),
      relativeHumidity = Some(createHumidityObservation),
      temperature = Some(createTemperatureObservation),
      theftProtection = Some(createTheftObservation),
      fireProtection = Some(createFireObservation),
      perimeterSecurity = Some(createPerimeterObservation),
      waterDamageAssessment = Some(createWaterDmgObservation)
    )
  }

  def createTemperatureControl(ok: Boolean = false): ControlTemperature = {
    ControlTemperature(ok, if (ok) None else Some(createTemperatureObservation))
  }

  def createTemperatureObservation: ObservationTemperature = {
    ObservationTemperature(
      note = Some("This is an observation temperature note"),
      range = FromToDouble(Some(12.32), Some(24.12))
    )
  }

  def createAlcoholControl(ok: Boolean = false): ControlAlcohol =
    ControlAlcohol(ok, if (ok) None else Some(createAlcoholObservation))

  def createAlcoholObservation: ObservationAlcohol =
    ObservationAlcohol(
      note = Some("This is an observation alcohol note"),
      condition = Some("pretty strong"),
      volume = Some(92.30)
    )

  def createCleaningControl(ok: Boolean = false): ControlCleaning =
    ControlCleaning(ok, if (ok) None else Some(createCleaningObservation))

  def createCleaningObservation: ObservationCleaning =
    ObservationCleaning(
      note = Some("This is an observation cleaning note"),
      cleaning = Some("Pretty dirty stuff")
    )

  def createGasObservation: ObservationGas =
    ObservationGas(
      note = Some("This is an observation gas note"),
      gas = Some("Smells like methane")
    )

  def createHypoxicObservation: ObservationHypoxicAir =
    ObservationHypoxicAir(
      note = Some("This is an observation hypoxic air note"),
      range = FromToDouble(Some(11.11), Some(12.12))
    )

  def createLightingObservation: ObservationLightingCondition =
    ObservationLightingCondition(
      note = Some("This is an observation lighting condition note"),
      lightingCondition = Some("Quite dim")
    )

  def createMoldObservation: ObservationMold =
    ObservationMold(
      note = Some("This is an observation mold note"),
      mold = Some("Mold is a fun guy")
    )

  def createHumidityObservation: ObservationRelativeHumidity =
    ObservationRelativeHumidity(
      note = Some("This is an observation humidity note"),
      range = FromToDouble(Some(70.0), Some(75.5))
    )

  def createTheftObservation: ObservationTheftProtection =
    ObservationTheftProtection(
      note = Some("This is an observation theft note"),
      theftProtection = Some("They stole all our stuff!!")
    )

  def createFireObservation: ObservationFireProtection =
    ObservationFireProtection(
      note = Some("This is an observation fire note"),
      fireProtection = Some("Fire extinguisher is almost empty")
    )

  def createPerimeterObservation: ObservationPerimeterSecurity =
    ObservationPerimeterSecurity(
      note = Some("This is an observation perimeter note"),
      perimeterSecurity = Some("Someone has cut a hole in the fence")
    )

  def createWaterDmgObservation: ObservationWaterDamageAssessment =
    ObservationWaterDamageAssessment(
      note = Some("This is an observation water damage note"),
      waterDamageAssessment = Some("The cellar is flooded")
    )

  def createPestControl(ok: Boolean = false): ControlPest =
    ControlPest(ok, if (ok) None else Some(createPestObservation))

  def createPestObservation: ObservationPest =
    ObservationPest(
      note = Some("This is an observation pest note"),
      identification = Some("termites"),
      lifecycles = Seq(
        LifeCycle(
          stage = Some("mature colony"),
          quantity = Some(100)
        ),
        LifeCycle(
          stage = Some("new colony"),
          quantity = Some(4)
        )
      )
    )
}
repo_name: kpmeen/musit
path: service_storagefacility/test/utils/testhelpers/EventGenerators.scala
language: Scala
license: gpl-2.0
size: 7,759
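A hedged usage sketch for the generators above (hypothetical spec body, assuming it mixes in EventGenerators with BaseDummyData): with the default arguments, createControl attaches "not ok" temperature, alcohol, and pest checks, each carrying its matching observation.

// Inside a spec: this: EventGenerators with BaseDummyData =>
val control = createControl(affectedNodeId = Some(defaultNodeId))

// The default generators populate these attribute Options.
assert(control.temperature.isDefined && control.pest.isDefined)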
package breeze

/**
 *
 * @author dlwh
 */
package object optimize {

  def minimize[Objective, Vector](fn: Objective, init: Vector, options: OptimizationOption*)
                                 (implicit optimization: OptimizationPackage[Objective, Vector]) = {
    optimization.minimize(fn, init, options: _*)
  }
}
repo_name: wavelets/breeze
path: src/main/scala/breeze/optimize/package.scala
language: Scala
license: apache-2.0
size: 348
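A usage sketch for the minimize helper above: DiffFunction is breeze's value-plus-gradient objective, for which an OptimizationPackage instance is available, so the implicit resolves; the objective here is illustrative.

import breeze.linalg.DenseVector
import breeze.optimize._

// f(x) = ||x - 3||^2, with gradient 2(x - 3); the minimum is at x = (3, 3, 3).
val f = new DiffFunction[DenseVector[Double]] {
  def calculate(x: DenseVector[Double]): (Double, DenseVector[Double]) = {
    val diff = x - 3.0
    (diff dot diff, diff * 2.0)
  }
}

val xmin = minimize(f, DenseVector.zeros[Double](3)) // ≈ DenseVector(3.0, 3.0, 3.0)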
package $package$

import android.os.Bundle
import android.app.Activity
import android.widget.TextView

class MainActivity extends Activity with TypedActivity {

  override def onCreate(bundle: Bundle) {
    super.onCreate(bundle)
    setContentView(R.layout.main)
    findView(TR.textview).setText("hello, world!")
  }
}
repo_name: ikuo/android-app-scala.g8
path: src/main/g8/src/main/scala/$package$/$mainActivity$.scala
language: Scala
license: mit
size: 321
/**
 * Licensed to Big Data Genomics (BDG) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The BDG licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bdgenomics.adam.io

import org.bdgenomics.adam.util.ADAMFunSuite
import org.apache.spark.rdd.RDD
import org.apache.hadoop.io.Text

class InterleavedFastqInputFormatSuite extends ADAMFunSuite {

  (1 to 4) foreach { testNumber =>
    val inputName = "interleaved_fastq_sample%d.ifq".format(testNumber)
    val expectedOutputName = inputName + ".output"
    val expectedOutputPath = ClassLoader.getSystemClassLoader.getResource(expectedOutputName).getFile
    val expectedOutputData = scala.io.Source.fromFile(expectedOutputPath).mkString

    sparkTest("interleaved FASTQ hadoop reader: %s->%s".format(inputName, expectedOutputName)) {
      def ifq_reader: RDD[(Void, Text)] = {
        val path = ClassLoader.getSystemClassLoader.getResource(inputName).getFile
        sc.newAPIHadoopFile(path, classOf[InterleavedFastqInputFormat], classOf[Void], classOf[Text])
      }

      val ifq_reads = ifq_reader.collect()

      val testOutput = new StringBuilder()
      ifq_reads.foreach(pair => {
        testOutput.append(">>>interleaved fastq record start>>>\n")
        testOutput.append(pair._2)
        testOutput.append("<<<interleaved fastq record end<<<\n")
      })

      assert(testOutput.toString() == expectedOutputData)
    }
  }
}
repo_name: VinACE/adam
path: adam-core/src/test/scala/org/bdgenomics/adam/io/InterleavedFastqInputFormatSuite.scala
language: Scala
license: apache-2.0
size: 2,080
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.carbondata.spark.util

import java.{lang, util}
import java.io.IOException
import java.lang.ref.Reference
import java.text.SimpleDateFormat
import java.util.Date

import scala.collection.mutable
import scala.util.Try

import com.univocity.parsers.common.TextParsingException
import org.apache.spark.SparkException
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.CatalogTablePartition
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.command.{DataTypeInfo, UpdateTableModel}
import org.apache.spark.sql.types._
import org.apache.spark.util.CarbonReflectionUtils

import org.apache.carbondata.common.exceptions.MetadataProcessException
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
import org.apache.carbondata.core.cache.{Cache, CacheProvider, CacheType}
import org.apache.carbondata.core.cache.dictionary.{Dictionary, DictionaryColumnUniqueIdentifier}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory
import org.apache.carbondata.core.metadata.ColumnIdentifier
import org.apache.carbondata.core.metadata.datatype.{DataType => CarbonDataType, DataTypes => CarbonDataTypes, DecimalType => CarbonDecimalType, StructField => CarbonStructField}
import org.apache.carbondata.core.metadata.encoder.Encoding
import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, DataMapSchema}
import org.apache.carbondata.core.metadata.schema.table.column.{CarbonColumn, ColumnSchema}
import org.apache.carbondata.core.util.DataTypeUtil
import org.apache.carbondata.processing.exception.DataLoadingException
import org.apache.carbondata.processing.loading.FailureCauses
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil
import org.apache.carbondata.streaming.parser.FieldConverter

object CarbonScalaUtil {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  // TODO: move this to spark module
  def convertSparkToCarbonDataType(dataType: DataType): CarbonDataType = {
    dataType match {
      case StringType => CarbonDataTypes.STRING
      case ShortType => CarbonDataTypes.SHORT
      case IntegerType => CarbonDataTypes.INT
      case LongType => CarbonDataTypes.LONG
      case DoubleType => CarbonDataTypes.DOUBLE
      case FloatType => CarbonDataTypes.FLOAT
      case DateType => CarbonDataTypes.DATE
      case BooleanType => CarbonDataTypes.BOOLEAN
      case TimestampType => CarbonDataTypes.TIMESTAMP
      case ArrayType(elementType, _) =>
        CarbonDataTypes.createArrayType(CarbonScalaUtil.convertSparkToCarbonDataType(elementType))
      case StructType(fields) =>
        val carbonFields = new util.ArrayList[CarbonStructField]
        fields.map { field =>
          carbonFields.add(
            new CarbonStructField(
              field.name,
              CarbonScalaUtil.convertSparkToCarbonDataType(field.dataType)))
        }
        CarbonDataTypes.createStructType(carbonFields)
      case NullType => CarbonDataTypes.NULL
      case decimal: DecimalType =>
        CarbonDataTypes.createDecimalType(decimal.precision, decimal.scale)
      case _ => throw new UnsupportedOperationException("getting " + dataType + " from spark")
    }
  }

  def convertCarbonToSparkDataType(dataType: CarbonDataType): types.DataType = {
    if (CarbonDataTypes.isDecimal(dataType)) {
      DecimalType(dataType.asInstanceOf[CarbonDecimalType].getPrecision,
        dataType.asInstanceOf[CarbonDecimalType].getScale)
    } else {
      dataType match {
        case CarbonDataTypes.STRING => StringType
        case CarbonDataTypes.SHORT => ShortType
        case CarbonDataTypes.INT => IntegerType
        case CarbonDataTypes.LONG => LongType
        case CarbonDataTypes.DOUBLE => DoubleType
        case CarbonDataTypes.BOOLEAN => BooleanType
        case CarbonDataTypes.TIMESTAMP => TimestampType
        case CarbonDataTypes.DATE => DateType
        case CarbonDataTypes.VARCHAR => StringType
      }
    }
  }

  def getString(value: Any,
      serializationNullFormat: String,
      delimiterLevel1: String,
      delimiterLevel2: String,
      timeStampFormat: SimpleDateFormat,
      dateFormat: SimpleDateFormat,
      isVarcharType: Boolean = false,
      level: Int = 1): String = {
    FieldConverter.objectToString(value, serializationNullFormat, delimiterLevel1,
      delimiterLevel2, timeStampFormat, dateFormat, isVarcharType = isVarcharType, level)
  }

  /**
   * Converts incoming value to String after converting data as per the data type.
   * @param value Input value to convert
   * @param dataType Datatype to convert and then convert to String
   * @param timeStampFormat Timestamp format to convert in case of timestamp datatypes
   * @param dateFormat DataFormat to convert in case of DateType datatype
   * @return converted String
   */
  def convertToDateAndTimeFormats(
      value: String,
      dataType: DataType,
      timeStampFormat: SimpleDateFormat,
      dateFormat: SimpleDateFormat): String = {
    val defaultValue = value != null && value.equalsIgnoreCase(hivedefaultpartition)
    try {
      dataType match {
        case TimestampType if timeStampFormat != null =>
          if (defaultValue) {
            timeStampFormat.format(new Date())
          } else {
            timeStampFormat.format(DateTimeUtils.stringToTime(value))
          }
        case DateType if dateFormat != null =>
          if (defaultValue) {
            dateFormat.format(new Date())
          } else {
            dateFormat.format(DateTimeUtils.stringToTime(value))
          }
        case _ =>
          val convertedValue =
            DataTypeUtil.getDataBasedOnDataType(value, convertSparkToCarbonDataType(dataType))
          if (convertedValue == null) {
            if (defaultValue) {
              return dataType match {
                case BooleanType => "false"
                case _ => "0"
              }
            }
            throw new MalformedCarbonCommandException(
              s"Value $value with datatype $dataType on static partition is not correct")
          }
          value
      }
    } catch {
      case e: Exception =>
        throw new MalformedCarbonCommandException(
          s"Value $value with datatype $dataType on static partition is not correct")
    }
  }

  /**
   * Converts incoming value to String after converting data as per the data type.
   * @param value Input value to convert
   * @param column the column to which the value belongs
   * @return converted String
   */
  def convertToCarbonFormat(
      value: String,
      column: CarbonColumn,
      forwardDictionaryCache: Cache[DictionaryColumnUniqueIdentifier, Dictionary],
      table: CarbonTable): String = {
    if (column.hasEncoding(Encoding.DICTIONARY)) {
      if (column.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
        if (column.getDataType.equals(CarbonDataTypes.TIMESTAMP)) {
          val time = DirectDictionaryKeyGeneratorFactory.getDirectDictionaryGenerator(
            column.getDataType,
            CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
          ).getValueFromSurrogate(value.toInt)
          if (time == null) {
            return null
          }
          return DateTimeUtils.timestampToString(time.toString.toLong * 1000)
        } else if (column.getDataType.equals(CarbonDataTypes.DATE)) {
          val date = DirectDictionaryKeyGeneratorFactory.getDirectDictionaryGenerator(
            column.getDataType,
            CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT
          ).getValueFromSurrogate(value.toInt)
          if (date == null) {
            return null
          }
          return DateTimeUtils.dateToString(date.toString.toInt)
        }
      }
      val dictionaryPath = table.getTableInfo.getFactTable.getTableProperties.get(
        CarbonCommonConstants.DICTIONARY_PATH)
      val dictionaryColumnUniqueIdentifier = new DictionaryColumnUniqueIdentifier(
        table.getAbsoluteTableIdentifier,
        column.getColumnIdentifier, column.getDataType,
        dictionaryPath)
      return forwardDictionaryCache.get(
        dictionaryColumnUniqueIdentifier).getDictionaryValueForKey(value.toInt)
    }
    try {
      column.getDataType match {
        case CarbonDataTypes.TIMESTAMP =>
          DateTimeUtils.timestampToString(value.toLong * 1000)
        case CarbonDataTypes.DATE =>
          DateTimeUtils.dateToString(DateTimeUtils.millisToDays(value.toLong))
        case _ => value
      }
    } catch {
      case e: Exception => value
    }
  }

  /**
   * Converts incoming value to String after converting data as per the data type.
   * @param value Input value to convert
   * @param column the column to which the value belongs
   * @return converted String
   */
  def convertStaticPartitions(
      value: String,
      column: ColumnSchema,
      table: CarbonTable): String = {
    try {
      if (column.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
        if (column.getDataType.equals(CarbonDataTypes.TIMESTAMP)) {
          return DirectDictionaryKeyGeneratorFactory.getDirectDictionaryGenerator(
            column.getDataType,
            CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
          ).generateDirectSurrogateKey(value).toString
        } else if (column.getDataType.equals(CarbonDataTypes.DATE)) {
          return DirectDictionaryKeyGeneratorFactory.getDirectDictionaryGenerator(
            column.getDataType,
            CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT
          ).generateDirectSurrogateKey(value).toString
        }
      } else if (column.hasEncoding(Encoding.DICTIONARY)) {
        val cacheProvider: CacheProvider = CacheProvider.getInstance
        val reverseCache: Cache[DictionaryColumnUniqueIdentifier, Dictionary] =
          cacheProvider.createCache(CacheType.REVERSE_DICTIONARY)
        val dictionaryPath = table.getTableInfo.getFactTable.getTableProperties.get(
          CarbonCommonConstants.DICTIONARY_PATH)
        val dictionaryColumnUniqueIdentifier = new DictionaryColumnUniqueIdentifier(
          table.getAbsoluteTableIdentifier,
          new ColumnIdentifier(
            column.getColumnUniqueId,
            column.getColumnProperties,
            column.getDataType),
          column.getDataType,
          dictionaryPath)
        return reverseCache.get(dictionaryColumnUniqueIdentifier).getSurrogateKey(value).toString
      }
      column.getDataType match {
        case CarbonDataTypes.TIMESTAMP => DateTimeUtils.stringToTime(value).getTime.toString
        case CarbonDataTypes.DATE => DateTimeUtils.stringToTime(value).getTime.toString
        case _ => value
      }
    } catch {
      case e: Exception => value
    }
  }

  private val hivedefaultpartition = "__HIVE_DEFAULT_PARTITION__"

  /**
   * Update partition values as per the right date and time format
   * @return updated partition spec
   */
  def updatePartitions(partitionSpec: mutable.LinkedHashMap[String, String],
      table: CarbonTable): mutable.LinkedHashMap[String, String] = {
    val cacheProvider: CacheProvider = CacheProvider.getInstance
    val forwardDictionaryCache: Cache[DictionaryColumnUniqueIdentifier, Dictionary] =
      cacheProvider.createCache(CacheType.FORWARD_DICTIONARY)
    partitionSpec.map { case (col, pvalue) =>
      // replace special string with empty value.
      val value = if (pvalue == null) {
        hivedefaultpartition
      } else if (pvalue.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL)) {
        ""
      } else {
        pvalue
      }
      val carbonColumn = table.getColumnByName(table.getTableName, col.toLowerCase)
      val dataType = CarbonScalaUtil.convertCarbonToSparkDataType(carbonColumn.getDataType)
      try {
        if (value.equals(hivedefaultpartition)) {
          (col, value)
        } else {
          val convertedString = CarbonScalaUtil.convertToCarbonFormat(
            value, carbonColumn, forwardDictionaryCache, table)
          if (convertedString == null) {
            (col, hivedefaultpartition)
          } else {
            (col, convertedString)
          }
        }
      } catch {
        case e: Exception => (col, value)
      }
    }
  }

  /**
   * Update partition values as per the right date and time format
   */
  def updatePartitions(
      parts: Seq[CatalogTablePartition],
      table: CarbonTable): Seq[CatalogTablePartition] = {
    parts.map { f =>
      val specLinkedMap: mutable.LinkedHashMap[String, String] =
        mutable.LinkedHashMap.empty[String, String]
      f.spec.foreach(fSpec => specLinkedMap.put(fSpec._1, fSpec._2))
      val changedSpec = updatePartitions(specLinkedMap, table).toMap
      f.copy(spec = changedSpec)
    }.groupBy(p => p.spec).map(f => f._2.head).toSeq // Avoid duplicates by doing a groupBy
  }

  /**
   * Returns all fields except the tupleId field, as it is not required in the value
   *
   * @param fields
   * @return
   */
  def getAllFieldsWithoutTupleIdField(fields: Array[StructField]): Seq[Column] = {
    // getting all fields except tupleId field as it is not required in the value
    val otherFields = fields.toSeq
      .filter(field => !field.name
        .equalsIgnoreCase(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID))
      .map(field => {
        new Column(field.name)
      })
    otherFields
  }

  /**
   * If the table is from an old store then the table parameters are in lowercase. In the current
   * code we are reading the parameters as camel case.
   * This method will convert all the schema parts to camel case
   *
   * @param parameters
   * @return
   */
  def getDeserializedParameters(parameters: Map[String, String]): Map[String, String] = {
    val keyParts = parameters.getOrElse("spark.sql.sources.options.keys.numparts", "0").toInt
    if (keyParts == 0) {
      parameters
    } else {
      val keyStr = 0 until keyParts map { i =>
        parameters(s"spark.sql.sources.options.keys.part.$i")
      }
      val finalProperties = scala.collection.mutable.Map.empty[String, String]
      keyStr foreach { key =>
        var value = ""
        for (numValues <- 0 until parameters(key.toLowerCase() + ".numparts").toInt) {
          value += parameters(key.toLowerCase() + ".part" + numValues)
        }
        finalProperties.put(key, value)
      }
      // Database name would be extracted from the parameter first. There can be a scenario where
      // the dbName is not written to the old schema therefore to be on a safer side we are
      // extracting dbName from tableName if it exists.
      // Note: split on a literal dot; the unescaped "." regex would match every character.
      val dbAndTableName = finalProperties("tableName").split("\\.")
      if (dbAndTableName.length > 1) {
        finalProperties.put("dbName", dbAndTableName(0))
        finalProperties.put("tableName", dbAndTableName(1))
      } else {
        finalProperties.put("tableName", dbAndTableName(0))
      }
      // Overriding the tablePath in case tablepath already exists. This will happen when old
      // table schema is updated by the new code then both `path` and `tablepath` will exist. In
      // this case use tablepath
      parameters.get("tablepath") match {
        case Some(tablePath) => finalProperties.put("tablePath", tablePath)
        case None =>
      }
      finalProperties.toMap
    }
  }

  /**
   * Retrieve error message from exception
   */
  def retrieveAndLogErrorMsg(ex: Throwable, logger: LogService): (String, String) = {
    var errorMessage = "DataLoad failure"
    var executorMessage = ""
    if (ex != null) {
      ex match {
        case sparkException: SparkException =>
          if (sparkException.getCause.isInstanceOf[IOException]) {
            if (sparkException.getCause.getCause.isInstanceOf[MetadataProcessException]) {
              executorMessage = sparkException.getCause.getCause.getMessage
              errorMessage = errorMessage + ": " + executorMessage
            } else {
              executorMessage = sparkException.getCause.getMessage
              errorMessage = errorMessage + ": " + executorMessage
            }
          } else if (sparkException.getCause.isInstanceOf[DataLoadingException] ||
                     sparkException.getCause.isInstanceOf[CarbonDataLoadingException]) {
            executorMessage = sparkException.getCause.getMessage
            errorMessage = errorMessage + ": " + executorMessage
          } else if (sparkException.getCause.isInstanceOf[TextParsingException]) {
            executorMessage =
              CarbonDataProcessorUtil.trimErrorMessage(sparkException.getCause.getMessage)
            errorMessage = errorMessage + " : " + executorMessage
          } else if (sparkException.getCause.isInstanceOf[SparkException]) {
            val (executorMsgLocal, errorMsgLocal) =
              retrieveAndLogErrorMsg(sparkException.getCause, logger)
            executorMessage = executorMsgLocal
            errorMessage = errorMsgLocal
          }
        case aex: AnalysisException =>
          logger.error(aex.getMessage())
          throw aex
        case _ =>
          if (ex.getCause != null) {
            executorMessage = ex.getCause.getMessage
            errorMessage = errorMessage + ": " + executorMessage
          }
      }
    }
    (executorMessage, errorMessage)
  }

  /**
   * Update error inside update model
   */
  def updateErrorInUpdateModel(updateModel: UpdateTableModel, executorMessage: String): Unit = {
    if (updateModel.executorErrors.failureCauses == FailureCauses.NONE) {
      updateModel.executorErrors.failureCauses = FailureCauses.EXECUTOR_FAILURE
      if (null != executorMessage && !executorMessage.isEmpty) {
        updateModel.executorErrors.errorMsg = executorMessage
      } else {
        updateModel.executorErrors.errorMsg = "Update failed as the data load has failed."
      }
    }
  }

  /**
   * Generate unique number to be used as partition number of file name
   */
  def generateUniqueNumber(taskId: Int,
      segmentId: String,
      partitionNumber: lang.Long): String = {
    String.valueOf(Math.pow(10, 2).toInt + segmentId.toInt) +
    String.valueOf(Math.pow(10, 5).toInt + taskId) +
    String.valueOf(partitionNumber + Math.pow(10, 5).toInt)
  }

  /**
   * Use reflection to clean the parser objects which are set in thread local to avoid memory issue
   */
  def cleanParserThreadLocals(): Unit = {
    try {
      // Get a reference to the thread locals table of the current thread
      val thread = Thread.currentThread
      val threadLocalsField = classOf[Thread].getDeclaredField("inheritableThreadLocals")
      threadLocalsField.setAccessible(true)
      val threadLocalTable = threadLocalsField.get(thread)
      // Get a reference to the array holding the thread local variables inside the
      // ThreadLocalMap of the current thread
      val threadLocalMapClass = Class.forName("java.lang.ThreadLocal$ThreadLocalMap")
      val tableField = threadLocalMapClass.getDeclaredField("table")
      tableField.setAccessible(true)
      val table = tableField.get(threadLocalTable)
      // The key to the ThreadLocalMap is a WeakReference object. The referent field of this object
      // is a reference to the actual ThreadLocal variable
      val referentField = classOf[Reference[Thread]].getDeclaredField("referent")
      referentField.setAccessible(true)
      var i = 0
      while (i < lang.reflect.Array.getLength(table)) {
        // Each entry in the table array of ThreadLocalMap is an Entry object
        val entry = lang.reflect.Array.get(table, i)
        if (entry != null) {
          // Get a reference to the thread local object and remove it from the table
          val threadLocal = referentField.get(entry).asInstanceOf[ThreadLocal[_]]
          if (threadLocal != null &&
              threadLocal.getClass.getName.startsWith("scala.util.DynamicVariable")) {
            threadLocal.remove()
          }
        }
        i += 1
      }
      table
    } catch {
      case e: Exception =>
      // ignore it
    }
  }

  /**
   * Create datamap provider using class name
   */
  def createDataMapProvider(
      className: String,
      sparkSession: SparkSession,
      table: CarbonTable,
      schema: DataMapSchema): Object = {
    CarbonReflectionUtils.createObject(
      className,
      table,
      sparkSession,
      schema)._1.asInstanceOf[Object]
  }

  /**
   * This method validates the local dictionary columns configurations
   *
   * @param tableProperties
   * @param localDictColumns
   */
  def validateLocalDictionaryColumns(tableProperties: mutable.Map[String, String],
      localDictColumns: Seq[String]): Unit = {
    var dictIncludeColumns: Seq[String] = Seq[String]()

    // check if the duplicate columns are specified in table schema
    if (localDictColumns.distinct.lengthCompare(localDictColumns.size) != 0) {
      val duplicateColumns = localDictColumns.diff(localDictColumns.distinct).distinct
      val errMsg =
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE contains Duplicate Columns: " +
        duplicateColumns.mkString(",") + ". Please check the DDL."
      throw new MalformedCarbonCommandException(errMsg)
    }

    // check if the same column is present in both dictionary include and local dictionary columns
    // configuration
    if (tableProperties.get(CarbonCommonConstants.DICTIONARY_INCLUDE).isDefined) {
      dictIncludeColumns =
        tableProperties(CarbonCommonConstants.DICTIONARY_INCLUDE).split(",").map(_.trim)
      localDictColumns.foreach { distCol =>
        if (dictIncludeColumns.exists(x => x.equalsIgnoreCase(distCol.trim))) {
          val commonColumn = (dictIncludeColumns ++ localDictColumns)
            .diff((dictIncludeColumns ++ localDictColumns).distinct).distinct
          val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " +
            commonColumn.mkString(",") +
            " specified in Dictionary include. Local Dictionary will not be " +
            "generated for Dictionary include columns. Please check the DDL."
throw new MalformedCarbonCommandException(errormsg) } } } } /** * this method validates the local dictionary enable property * * @param localDictionaryEnable * @return */ def validateLocalDictionaryEnable(localDictionaryEnable: String): Boolean = { Try(localDictionaryEnable.toBoolean) match { case scala.util.Success(value) => true case scala.util.Failure(ex) => false } } /** * this method validates the local dictionary threshold property * * @param localDictionaryThreshold * @return */ def validateLocalDictionaryThreshold(localDictionaryThreshold: String): Boolean = { // if any invalid value is configured for LOCAL_DICTIONARY_THRESHOLD, then default value // will be // considered which is 1000 Try(localDictionaryThreshold.toInt) match { case scala.util.Success(value) => if (value < CarbonCommonConstants.LOCAL_DICTIONARY_MIN || value > CarbonCommonConstants.LOCAL_DICTIONARY_MAX) { false } else { true } case scala.util.Failure(ex) => false } } /** * This method validate if both local dictionary include and exclude contains same column * * @param tableProperties */ def validateDuplicateLocalDictIncludeExcludeColmns(tableProperties: mutable.Map[String, String]): Unit = { val isLocalDictIncludeDefined = tableProperties .get(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE) .isDefined val isLocalDictExcludeDefined = tableProperties .get(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE) .isDefined if (isLocalDictIncludeDefined && isLocalDictExcludeDefined) { val localDictIncludeCols = tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE) .split(",").map(_.trim) val localDictExcludeCols = tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE) .split(",").map(_.trim) localDictIncludeCols.foreach { distCol => if (localDictExcludeCols.exists(x => x.equalsIgnoreCase(distCol.trim))) { val duplicateColumns = (localDictIncludeCols ++ localDictExcludeCols) .diff((localDictIncludeCols ++ localDictExcludeCols).distinct).distinct val errMsg = "Column ambiguity as duplicate column(s):" + duplicateColumns.mkString(",") + " is present in LOCAL_DICTIONARY_INCLUDE " + "and LOCAL_DICTIONARY_EXCLUDE. Duplicate columns are not allowed." throw new MalformedCarbonCommandException(errMsg) } } } } def isStringDataType(dataType: DataType): Boolean = { dataType == StringType } }
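
// Hedged usage sketch (not part of the original file): this illustrates the
// regex-escaping pitfall behind the dbName/tableName split fixed in
// getDeserializedParameters above. String.split takes a regular expression,
// so an unescaped "." matches every character and produces an empty array,
// while "\\." splits on the literal dot. All names here are illustrative.
object SplitPitfallExample {
  def main(args: Array[String]): Unit = {
    val qualified = "mydb.mytable"
    println(qualified.split(".").length)   // 0 -- "." matched every character
    println(qualified.split("\\.").toList) // List(mydb, mytable)
  }
}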
jatin9896/incubator-carbondata
integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
Scala
apache-2.0
26,094
/** * Copyright (C) 2012 Typesafe, Inc. <http://www.typesafe.com> */ package org.pantsbuild.zinc.options import java.io.File import scala.annotation.tailrec /** * Parsing command-line options, immutably. */ object Options { def parse[Context](context: Context, options: Set[OptionDef[Context]], args: Seq[String], stopOnError: Boolean): Parsed[Context] = parseOptions(context, options, args, Seq.empty, Seq.empty, stopOnError) @tailrec def parseOptions[Context](context: Context, options: Set[OptionDef[Context]], args: Seq[String], residual: Seq[String], errors: Seq[String], stopOnError: Boolean): Parsed[Context] = { if (args.isEmpty || (stopOnError && !errors.isEmpty)) { Parsed(context, residual, errors) } else { val arg = args.head options find (_ claims arg) match { case Some(option) => val parsed = option.process(context, args) parseOptions(parsed.context, options, parsed.remaining, residual, errors ++ parsed.errors, stopOnError) case None => parseOptions(context, options, args.tail, residual :+ arg, errors, stopOnError) } } } } case class Parsed[Context](context: Context, remaining: Seq[String], errors: Seq[String] = Seq.empty) abstract class OptionDef[Context] { def options: Seq[String] def description: String def process(context: Context, args: Seq[String]): Parsed[Context] def claims(option: String): Boolean = options contains option def help: String = options mkString (" | ") def length: Int = help.length def usage(column: Int): String = (" " + help.padTo(column, ' ') + description) def extraline: Boolean = false } abstract class FlagOption[Context] extends OptionDef[Context] { def action: Context => Context def process(context: Context, args: Seq[String]) = Parsed(action(context), args.tail) } abstract class ArgumentOption[Value, Context] extends OptionDef[Context] { def argument: String def parse(arg: String): Option[Value] def action: (Context, Value) => Context def process(context: Context, args: Seq[String]): Parsed[Context] = { val rest = args.tail def error = Parsed(context, rest, Seq(invalid)) if (rest.isEmpty) error else parse(rest.head) match { case Some(value) => Parsed(action(context, value), rest.tail) case None => error } } def invalid = "Invalid option for " + options.headOption.getOrElse("") override def help = options.mkString(" | ") + " <" + argument + ">" } class BooleanOption[Context]( val options: Seq[String], val description: String, val action: Context => Context) extends FlagOption[Context] class StringOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, String) => Context) extends ArgumentOption[String, Context] { def parse(arg: String): Option[String] = { Some(arg) } } class IntOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, Int) => Context) extends ArgumentOption[Int, Context] { def parse(arg: String): Option[Int] = { try { Some(arg.toInt) } catch { case _: NumberFormatException => None } } } class DoubleOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, Double) => Context) extends ArgumentOption[Double, Context] { def parse(arg: String): Option[Double] = { try { Some(arg.toDouble) } catch { case _: NumberFormatException => None } } } class FractionOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, Double) => Context) extends ArgumentOption[Double, Context] { def parse(arg: String): Option[Double] = { try { val 
fraction = arg.toDouble if (fraction < 0.0 || fraction > 1.0) None else Some(fraction) } catch { case _: NumberFormatException => None } } } class FileOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, File) => Context) extends ArgumentOption[File, Context] { def parse(arg: String): Option[File] = { Some(new File(arg)) } } class PathOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, Seq[File]) => Context) extends ArgumentOption[Seq[File], Context] { def parse(arg: String): Option[Seq[File]] = ParseHelpers.parseFileSeq(arg) } class PrefixOption[Context]( val prefix: String, val argument: String, val description: String, val action: (Context, String) => Context) extends OptionDef[Context] { def options = Seq(prefix) override def claims(option: String) = option startsWith prefix def process(context: Context, args: Seq[String]): Parsed[Context] = { val prefixed = args.head.substring(prefix.length) Parsed(action(context, prefixed), args.tail) } override def help = prefix + argument } class FilePairOption[Context]( val options: Seq[String], val argument: String, val description: String, val action: (Context, (File, File)) => Context) extends ArgumentOption[(File, File), Context] { val pairSeparator = File.pathSeparatorChar def parse(arg: String): Option[(File, File)] = { val p = arg split pairSeparator if (p.length == 2) Some((new File(p(0)), new File(p(1)))) else None } } class FileMapOption[Context]( val options: Seq[String], val description: String, val action: (Context, Map[File, File]) => Context) extends ArgumentOption[Map[File, File], Context] { val argument = "mapping" val argSeparator = ',' val pairSeparator = File.pathSeparatorChar def parse(arg: String): Option[Map[File, File]] = { val pairs = arg split argSeparator val files = pairs map parseFilePair if (files exists (_.isEmpty)) None else Some(files.flatten.toMap) } def parseFilePair(pair: String): Option[(File, File)] = { val p = pair split pairSeparator p.length match { case 1 => Some((new File(p(0)), new File(""))) case 2 => Some((new File(p(0)), new File(p(1)))) case _ => None } } } class FileSeqMapOption[Context]( val options: Seq[String], val description: String, val action: (Context, Map[Seq[File], File]) => Context) extends ArgumentOption[Map[Seq[File], File], Context] { val argument = "mapping" val argSeparator = ',' val openSeq = '{' val closeSeq = '}' val pairSeparator = File.pathSeparatorChar def parse(arg: String): Option[Map[Seq[File], File]] = { val pairs = arg split argSeparator val files = pairs map parseFileSeqPair if (files exists (_.isEmpty)) None else Some(files.flatten.toMap) } def parseFileSeqPair(pair: String): Option[(Seq[File], File)] = { val pairSeparatorIdx = pair lastIndexOf pairSeparator if (pairSeparatorIdx == -1) { None } else { val p = Array(pair.substring(0, pairSeparatorIdx), pair.substring(pairSeparatorIdx + 1)) parseFileSeq(p(0)) match { case Some(fileSeq) => Some((fileSeq, new File(p(1)))) case None => None } } } def parseFileSeq(arg: String): Option[Seq[File]] = { val seqArg = if (arg.head == openSeq && arg.last == closeSeq) arg.init.tail else arg ParseHelpers.parseFileSeq(seqArg) } } class HeaderOption[Context]( val header: String) extends OptionDef[Context] { def options: Seq[String] = Seq.empty def description = "" def process(context: Context, args: Seq[String]) = Parsed(context, args.tail) override def claims(option: String): Boolean = false override def help = "" 
override def length = 0 override def usage(column: Int) = header override def extraline = true } class DummyOption[Context]( val optionHelp: String, val description: String) extends OptionDef[Context] { def options: Seq[String] = Seq.empty def process(context: Context, args: Seq[String]) = Parsed(context, args.tail) override def claims(option: String): Boolean = false override def help = optionHelp } object ParseHelpers { def parseFileSeq(arg: String): Option[Seq[File]] = { val expanded = scala.tools.nsc.util.ClassPath.expandPath(arg) val files = expanded map (new File(_)) Some(files) } }
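
// Hedged usage sketch (not part of the original file): wiring a minimal option
// set and parsing a command line with Options.parse. The Settings case class
// and the flag names are hypothetical; only the OptionDef subclasses defined
// above are assumed.
object OptionsUsageExample {
  case class Settings(verbose: Boolean = false, out: Option[File] = None)

  private val defs: Set[OptionDef[Settings]] = Set(
    new BooleanOption[Settings](Seq("-verbose"), "Enable verbose output",
      ctx => ctx.copy(verbose = true)),
    new FileOption[Settings](Seq("-out"), "file", "Output file",
      (ctx, f) => ctx.copy(out = Some(f)))
  )

  def main(args: Array[String]): Unit = {
    // Unclaimed arguments accumulate in `remaining`; parse failures in `errors`.
    val parsed = Options.parse(Settings(), defs, args.toSeq, stopOnError = true)
    println(s"context=${parsed.context} remaining=${parsed.remaining} errors=${parsed.errors}")
  }
}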
fkorotkov/pants
src/scala/org/pantsbuild/zinc/options/Options.scala
Scala
apache-2.0
8,289
package composition.webserviceclients.audit2 import com.tzavellas.sse.guice.ScalaModule import org.mockito.Matchers.any import org.mockito.Mockito.when import org.scalatest.mock.MockitoSugar import scala.concurrent.Future import uk.gov.dvla.vehicles.presentation.common.clientsidesession.TrackingId import webserviceclients.audit2.AuditMicroService import webserviceclients.audit2.AuditRequest final class AuditMicroServiceCallFails extends ScalaModule with MockitoSugar { val stub = { val webService = mock[AuditMicroService] when(webService.invoke(request = any[AuditRequest], trackingId = any[TrackingId])).thenReturn(fail) webService } def configure() = bind[AuditMicroService].toInstance(stub) private def fail = Future.failed { new RuntimeException("This error is generated deliberately for test purposes" + " by the stub AuditMicroServiceCallFails") } }
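
// Hedged usage sketch (not part of the original file): installing this module
// in a Guice injector yields an AuditMicroService whose invoke() always returns
// a failed Future, letting a test drive the audit error path. Injector creation
// is shown with the plain Guice API; the project's actual test wiring may differ.
//
//   val injector = com.google.inject.Guice.createInjector(new AuditMicroServiceCallFails)
//   val failingService = injector.getInstance(classOf[AuditMicroService])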
dvla/vrm-assign-online
test/composition/webserviceclients/audit2/AuditMicroServiceCallFails.scala
Scala
mit
898
/* * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.api.tools.tests.scaladsl import akka.NotUsed import com.lightbend.lagom.scaladsl.api.transport.Method import com.lightbend.lagom.scaladsl.api.Descriptor import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceCall import com.lightbend.lagom.scaladsl.api.Service._ import scala.concurrent.Future trait UndescribedService extends Service { def getMock(id: String): ServiceCall[NotUsed, NotUsed] def descriptor: Descriptor = named("/noaclservice").withCalls( restCall(Method.GET, "/mocks/:id", getMock _) ) } class UndescribedServiceImpl extends UndescribedService { def getMock(id: String) = ServiceCall { _ => Future.successful(NotUsed) } }
rcavalcanti/lagom
api-tools/src/test/scala/com/lightbend/lagom/api/tools/tests/scaladsl/UndescribedService.scala
Scala
apache-2.0
816
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.rdd import java.io.File import scala.collection.Map import scala.io.Codec import scala.language.postfixOps import scala.sys.process._ import scala.util.Try import org.apache.hadoop.fs.Path import org.apache.hadoop.io.{LongWritable, Text} import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat} import org.apache.spark._ import org.apache.spark.util.Utils class PipedRDDSuite extends SparkFunSuite with SharedSparkContext { test("basic pipe") { if (testCommandAvailable("cat")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val piped = nums.pipe(Seq("cat")) val c = piped.collect() assert(c.size === 4) assert(c(0) === "1") assert(c(1) === "2") assert(c(2) === "3") assert(c(3) === "4") } else { assert(true) } } test("basic pipe with tokenization") { if (testCommandAvailable("wc")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) // verify that both RDD.pipe(command: String) and RDD.pipe(command: String, env) work good for (piped <- Seq(nums.pipe("wc -l"), nums.pipe("wc -l", Map[String, String]()))) { val c = piped.collect() assert(c.size === 2) assert(c(0).trim === "2") assert(c(1).trim === "2") } } else { assert(true) } } test("failure in iterating over pipe input") { if (testCommandAvailable("cat")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) .mapPartitionsWithIndex((index, iterator) => { new Iterator[Int] { def hasNext = true def next() = { throw new SparkException("Exception to simulate bad scenario") } } }) val piped = nums.pipe(Seq("cat")) intercept[SparkException] { piped.collect() } } } test("advanced pipe") { if (testCommandAvailable("cat")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val bl = sc.broadcast(List("0")) val piped = nums.pipe(Seq("cat"), Map[String, String](), (f: String => Unit) => { bl.value.map(f(_)); f("\\u0001") }, (i: Int, f: String => Unit) => f(i + "_")) val c = piped.collect() assert(c.size === 8) assert(c(0) === "0") assert(c(1) === "\\u0001") assert(c(2) === "1_") assert(c(3) === "2_") assert(c(4) === "0") assert(c(5) === "\\u0001") assert(c(6) === "3_") assert(c(7) === "4_") val nums1 = sc.makeRDD(Array("a\\t1", "b\\t2", "a\\t3", "b\\t4"), 2) val d = nums1.groupBy(str => str.split("\\t")(0)). 
pipe(Seq("cat"), Map[String, String](), (f: String => Unit) => { bl.value.map(f(_)); f("\\u0001") }, (i: Tuple2[String, Iterable[String]], f: String => Unit) => { for (e <- i._2) { f(e + "_") } }).collect() assert(d.size === 8) assert(d(0) === "0") assert(d(1) === "\\u0001") assert(d(2) === "b\\t2_") assert(d(3) === "b\\t4_") assert(d(4) === "0") assert(d(5) === "\\u0001") assert(d(6) === "a\\t1_") assert(d(7) === "a\\t3_") } else { assert(true) } } test("pipe with empty partition") { val data = sc.parallelize(Seq("foo", "bing"), 8) val piped = data.pipe("wc -c") assert(piped.count == 8) val charCounts = piped.map(_.trim.toInt).collect().toSet assert(Set(0, 4, 5) == charCounts) } test("pipe with env variable") { if (testCommandAvailable("printenv")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val piped = nums.pipe(Seq("printenv", "MY_TEST_ENV"), Map("MY_TEST_ENV" -> "LALALA")) val c = piped.collect() assert(c.size === 2) assert(c(0) === "LALALA") assert(c(1) === "LALALA") } else { assert(true) } } test("pipe with process which cannot be launched due to bad command") { if (!testCommandAvailable("some_nonexistent_command")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val command = Seq("some_nonexistent_command") val piped = nums.pipe(command) val exception = intercept[SparkException] { piped.collect() } assert(exception.getMessage.contains(command.mkString(" "))) } } test("pipe with process which is launched but fails with non-zero exit status") { if (testCommandAvailable("cat")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val command = Seq("cat", "nonexistent_file") val piped = nums.pipe(command) val exception = intercept[SparkException] { piped.collect() } assert(exception.getMessage.contains(command.mkString(" "))) } } test("basic pipe with separate working directory") { if (testCommandAvailable("cat")) { val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val piped = nums.pipe(Seq("cat"), separateWorkingDir = true) val c = piped.collect() assert(c.size === 4) assert(c(0) === "1") assert(c(1) === "2") assert(c(2) === "3") assert(c(3) === "4") val pipedPwd = nums.pipe(Seq("pwd"), separateWorkingDir = true) val collectPwd = pipedPwd.collect() assert(collectPwd(0).contains("tasks/")) val pipedLs = nums.pipe(Seq("ls"), separateWorkingDir = true, bufferSize = 16384).collect() // make sure symlinks were created assert(pipedLs.length > 0) // clean up top level tasks directory Utils.deleteRecursively(new File("tasks")) } else { assert(true) } } test("test pipe exports map_input_file") { testExportInputFile("map_input_file") } test("test pipe exports mapreduce_map_input_file") { testExportInputFile("mapreduce_map_input_file") } def testCommandAvailable(command: String): Boolean = { Try(Process(command) !!).isSuccess } def testExportInputFile(varName: String) { if (testCommandAvailable("printenv")) { val nums = new HadoopRDD(sc, new JobConf(), classOf[TextInputFormat], classOf[LongWritable], classOf[Text], 2) { override def getPartitions: Array[Partition] = Array(generateFakeHadoopPartition()) override val getDependencies = List[Dependency[_]]() override def compute(theSplit: Partition, context: TaskContext) = { new InterruptibleIterator[(LongWritable, Text)](context, Iterator((new LongWritable(1), new Text("b")))) } } val hadoopPart1 = generateFakeHadoopPartition() val pipedRdd = new PipedRDD( nums, PipedRDD.tokenize("printenv " + varName), Map(), null, null, false, 4092, Codec.defaultCharsetCodec.name) val tContext = TaskContext.empty() val rddIter = pipedRdd.compute(hadoopPart1, tContext) val arr 
= rddIter.toArray assert(arr(0) == "/some/path") } else { // printenv isn't available so just pass the test } } def generateFakeHadoopPartition(): HadoopPartition = { val split = new FileSplit(new Path("/some/path"), 0, 1, Array[String]("loc1", "loc2", "loc3", "loc4", "loc5")) new HadoopPartition(sc.newRddId(), 1, split) } }
gioenn/xSpark
core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
Scala
apache-2.0
8,086
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy

private[deploy] class DriverDescription(
    val jarUrl: String, // URL of the application jar
    val mem: Int, // memory required from the worker
    val cores: Int, // number of cores required from the worker
    val supervise: Boolean, // if set, the driver is restarted automatically when it dies
    val command: Command)
  extends Serializable {

  def copy(
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Int = cores,
      supervise: Boolean = supervise,
      command: Command = command): DriverDescription =
    new DriverDescription(jarUrl, mem, cores, supervise, command)

  override def toString: String = s"DriverDescription (${command.mainClass})"
}
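
// Hedged usage sketch (not part of the original file): `copy` returns a new
// description with selected fields overridden, e.g. enabling supervision when
// resubmitting a driver. The jar URL and the Command value `cmd` are hypothetical.
//
//   val desc = new DriverDescription("hdfs:///apps/app.jar", mem = 1024,
//     cores = 2, supervise = false, command = cmd)
//   val supervised = desc.copy(supervise = true)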
tophua/spark1.52
core/src/main/scala/org/apache/spark/deploy/DriverDescription.scala
Scala
apache-2.0
1,465
package io.netflow.storage.redis import io.netflow.flows.cflow._ import io.netflow.lib._ private[netflow] object NetFlowV7Packet extends FlowPacketMeta[NetFlowV7Packet] { def persist(fp: NetFlowV7Packet): Unit = () }
ayscb/netflow
netflow1/netflow-master/src/main/scala/io/netflow/storage/redis/NetFlowV7Packet.scala
Scala
apache-2.0
220
/* * Copyright (c) 2014 Paul Bernard * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Spectrum Finance is based in part on: * QuantLib. http://quantlib.org/ * */ package org.quantintel.ql.time.calendars import org.quantintel.ql.time.Month._ import org.quantintel.ql.time.{Western, Date, Calendar} import org.quantintel.ql.time.Weekday._ object TargetEnum extends Enumeration { type TargetEnum = Value val TARGET = Value(1) def valueOf(market: Int) : TargetEnum = market match { case 1 => TARGET case _ => throw new Exception("Valid units = 1") } } object Target { def apply(): Calendar = { new Target() } def apply(market: org.quantintel.ql.time.calendars.TargetEnum.TargetEnum): Calendar = { new Target(market) } } /** * * TARGET calendar relative to the European Central Bank * This is a holiday calendar representing the * Trans-european Automated Real-time Gross Express-settlement Transfer * system calendar. * * Saturdays * Sundays * New Year's Day, JANUARY 1st * Good Friday (since 2000) * Easter Monday (since 2000) * Labour Day, May 1st (since 2000) * Christmas, December 25th * Day of Goodwill, December 26th (since 2000) * December 31st (1998, 1999, and 2001) * * Reference: http://www.ecb.int * * @author Paul Bernard */ class Target extends Calendar { impl = new Target import org.quantintel.ql.time.calendars.TargetEnum._ def this(market: org.quantintel.ql.time.calendars.TargetEnum.TargetEnum ) { this market match { case TARGET => impl = new Target case _ => throw new Exception("Valid units = 1") } } private class Target extends Western { override def name : String = "TARGET" override def isBusinessDay(date: Date): Boolean = { // standard dependencies val w: Weekday = date.weekday val d: Int = date.dayOfMonth val dd: Int = date.dayOfYear val m: Month = date.month val y: Int = date.year val em: Int = easterMonday(y) if (isWeekend(w) || (d == 1 && m == JANUARY) // New Year's Day || (dd == em - 3 && y >= 2000) // Good Friday || (dd == em && y >= 2000) // Easter Monday || (d == 1 && m == MAY && y >= 2000) // Labour Day || (d == 25 && m == DECEMBER) // Christmas || (d == 26 && m == DECEMBER && y >= 2000) // Day of Goodwill || (d == 31 && m == DECEMBER && (y == 1998 || y == 1999 || y == 2001))) // December 31st, 1998, 1999, and 2001 only false else true } } }
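
// Hedged usage sketch (not part of the original file): obtaining the calendar
// through the companion factory and querying a holiday. The Date construction
// below assumes a day/month/year factory exists in org.quantintel.ql.time and
// that Calendar exposes isBusinessDay; adjust to the actual API.
//
//   val target: Calendar = Target()
//   // e.g. Labour Day 2014 is a TARGET holiday:
//   // target.isBusinessDay(Date(1, MAY, 2014)) == false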
quantintel/spectrum
financial/src/main/scala/org/quantintel/ql/time/calendars/Target.scala
Scala
apache-2.0
3,050
package com.github.tonivade.buildtiful import Config._ trait Downloader { def download(build: Build) : Unit } class IvyDownloader extends Downloader { import IvyTasks._ def download(build: Build) { println("download") if (!libs.exists()) libs.mkdirs() ivyDownload(build).execute() } }
tonivade/buildtiful
src/main/scala/Downloader.scala
Scala
mit
316
package gitbucket.core.controller import gitbucket.core.admin.html import gitbucket.core.service.{AccountService, SystemSettingsService} import gitbucket.core.util.AdminAuthenticator import gitbucket.core.ssh.SshServer import SystemSettingsService._ import jp.sf.amateras.scalatra.forms._ class SystemSettingsController extends SystemSettingsControllerBase with AccountService with AdminAuthenticator trait SystemSettingsControllerBase extends ControllerBase { self: AccountService with AdminAuthenticator => private val form = mapping( "baseUrl" -> trim(label("Base URL", optional(text()))), "information" -> trim(label("Information", optional(text()))), "allowAccountRegistration" -> trim(label("Account registration", boolean())), "allowAnonymousAccess" -> trim(label("Anonymous access", boolean())), "isCreateRepoOptionPublic" -> trim(label("Default option to create a new repository", boolean())), "gravatar" -> trim(label("Gravatar", boolean())), "notification" -> trim(label("Notification", boolean())), "activityLogLimit" -> trim(label("Limit of activity logs", optional(number()))), "ssh" -> trim(label("SSH access", boolean())), "sshPort" -> trim(label("SSH port", optional(number()))), "smtp" -> optionalIfNotChecked("notification", mapping( "host" -> trim(label("SMTP Host", text(required))), "port" -> trim(label("SMTP Port", optional(number()))), "user" -> trim(label("SMTP User", optional(text()))), "password" -> trim(label("SMTP Password", optional(text()))), "ssl" -> trim(label("Enable SSL", optional(boolean()))), "fromAddress" -> trim(label("FROM Address", optional(text()))), "fromName" -> trim(label("FROM Name", optional(text()))) )(Smtp.apply)), "ldapAuthentication" -> trim(label("LDAP", boolean())), "ldap" -> optionalIfNotChecked("ldapAuthentication", mapping( "host" -> trim(label("LDAP host", text(required))), "port" -> trim(label("LDAP port", optional(number()))), "bindDN" -> trim(label("Bind DN", optional(text()))), "bindPassword" -> trim(label("Bind Password", optional(text()))), "baseDN" -> trim(label("Base DN", text(required))), "userNameAttribute" -> trim(label("User name attribute", text(required))), "additionalFilterCondition"-> trim(label("Additional filter condition", optional(text()))), "fullNameAttribute" -> trim(label("Full name attribute", optional(text()))), "mailAttribute" -> trim(label("Mail address attribute", optional(text()))), "tls" -> trim(label("Enable TLS", optional(boolean()))), "ssl" -> trim(label("Enable SSL", optional(boolean()))), "keystore" -> trim(label("Keystore", optional(text()))) )(Ldap.apply)) )(SystemSettings.apply).verifying { settings => if(settings.ssh && settings.baseUrl.isEmpty){ Seq("baseUrl" -> "Base URL is required if SSH access is enabled.") } else Nil } private val pluginForm = mapping( "pluginId" -> list(trim(label("", text()))) )(PluginForm.apply) case class PluginForm(pluginIds: List[String]) get("/admin/system")(adminOnly { html.system(flash.get("info")) }) post("/admin/system", form)(adminOnly { form => saveSystemSettings(form) if(form.ssh && SshServer.isActive && context.settings.sshPort != form.sshPort){ SshServer.stop() } if(form.ssh && !SshServer.isActive && form.baseUrl.isDefined){ SshServer.start( form.sshPort.getOrElse(SystemSettingsService.DefaultSshPort), form.baseUrl.get) } else if(!form.ssh && SshServer.isActive){ SshServer.stop() } flash += "info" -> "System settings has been updated." redirect("/admin/system") }) }
intermezzo-fr/gitbucket
src/main/scala/gitbucket/core/controller/SystemSettingsController.scala
Scala
apache-2.0
4,228
/** * Copyright (C) 2017 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.util import org.orbeon.oxf.util.ContentTypes._ import org.scalatest.funspec.AnyFunSpec class ContentTypesTest extends AnyFunSpec { describe("The isXMLContentType method") { it("must handle spaces and parameters") { assert(true === isXMLContentType(" text/xml ; charset=utf8")) } it("must support suffixes") { assert(true === isXMLContentType(" application/atom+xml ; charset=utf8")) } } describe("The isJSONContentType method") { it("must handle spaces and parameters") { assert(true === isJSONContentType(" application/json ; charset=utf8")) } it("must support suffixes") { assert(true === isJSONContentType(" application/calendar+json ; charset=utf8")) } } describe("The getContentTypeParameters method") { it("must parse parameters and ignore spaces") { assert(Map("charset" -> "utf8", "foo" -> "bar") === getContentTypeParameters(" text/html ; charset=utf8; foo = bar ")) } it("must ignore blank names") { assert(Map("charset" -> "utf8") === getContentTypeParameters(" text/html ; charset=utf8; = bar ")) } it("must return blank values") { assert(Map("charset" -> "utf8", "foo" -> "") === getContentTypeParameters(" text/html ; charset=utf8; foo = ")) } } describe("The isTextContentType method") { it("must handle spaces") { assert(true === isTextContentType(" text/plain ; charset=utf8")) } } }
orbeon/orbeon-forms
common/shared/src/test/scala/org/orbeon/oxf/util/ContentTypesTest.scala
Scala
lgpl-2.1
2,120
/* * The MIT License * * Copyright (c) 2019 Fulcrum Genomics LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.fulcrumgenomics.cmdline import java.nio.file.Paths import com.fulcrumgenomics.FgBioDef.SafelyClosable import com.fulcrumgenomics.bam.api.SamSource import com.fulcrumgenomics.commons.util.SystemUtil.IntelCompressionLibrarySupported import com.fulcrumgenomics.testing.UnitSpec import com.intel.gkl.compression.{IntelDeflaterFactory, IntelInflaterFactory} import htsjdk.samtools.BAMRecordCodec import htsjdk.samtools.util._ import htsjdk.samtools.util.zip.{DeflaterFactory, InflaterFactory} import org.scalatest.Retries import org.scalatest.tagobjects.Retryable class IntelCompressionTest extends UnitSpec with Retries { private val intelSupported = IntelCompressionLibrarySupported private val testBam = Paths.get("src/test/resources/com/fulcrumgenomics/bam/estimate_pooling_fractions/HG01583.bam") private val levels = Seq(2, 5, 9) override def withFixture(test: NoArgTest) = { if (isRetryable(test)) withRetry { super.withFixture(test) } else super.withFixture(test) } "IntelDeflater" should "be available" in { if (!intelSupported) cancel("IntelDeflater is not available on this platform") } levels.foreach { level => it should s"deflate faster than the JDK Deflater on level $level" taggedAs(Retryable) in { if (!intelSupported) cancel("IntelDeflater is not available on this platform") val source = SamSource(testBam) val records = source.toList val header = source.header source.safelyClose() // a little method to deflate given an deflater factory def run(factory: DeflaterFactory): Long = { val output = makeTempFile("test", ".txt") val startTime = System.currentTimeMillis() val os = new BlockCompressedOutputStream(output, level, factory) val codec = new BAMRecordCodec(header) codec.setOutputStream(os) val repetitions = levels.max - level Range.inclusive(0, repetitions).foreach { _ => records.foreach { rec => codec.encode(rec.asSam) } } os.close() val endTime = System.currentTimeMillis() endTime - startTime } val jdkTime = run(new DeflaterFactory) val intelTime = run(new IntelDeflaterFactory) info(f"Intel: ${intelTime}ms JDK: ${jdkTime}ms speedup: ${jdkTime/intelTime.toFloat}%.2fx") intelTime.toDouble should be <= (jdkTime * 1.05) } } "IntelInflater" should "be available" in { if (!intelSupported) cancel("IntelDeflater is not available on this platform") } levels.foreach { level => it should s"inflate faster than the JDK Inflater on level $level" in { if (!intelSupported) cancel("IntelInflater 
is not available on this platform") val source = SamSource(testBam) val records = source.toList val header = source.header source.safelyClose() // create a new compressed file val output = makeTempFile("test", ".txt") val os = new BlockCompressedOutputStream(output, level, new IntelDeflaterFactory) val codec = new BAMRecordCodec(header) codec.setOutputStream(os) records.foreach { rec => codec.encode(rec.asSam) } os.close() // a little method to inflate given an inflater factory def run(factory: InflaterFactory): Long = { val startTime = System.currentTimeMillis() Range.inclusive(1, 25).foreach { i => val is = new BlockCompressedInputStream(output.toFile, factory) val codec = new BAMRecordCodec(header) codec.setInputStream(is, testBam.toFile.toString) while (null != codec.decode()) { () } is.close() } val endTime = System.currentTimeMillis() endTime - startTime } val jdkTime = run(new InflaterFactory) val intelTime = run(new IntelInflaterFactory) info(f"Intel: ${intelTime}ms JDK: ${jdkTime}ms speedup: ${jdkTime/intelTime.toFloat}%.2fx") intelTime should be <= jdkTime } } }
fulcrumgenomics/fgbio
src/test/scala/com/fulcrumgenomics/cmdline/IntelCompressionTest.scala
Scala
mit
5,207
package se.apogo.kdom.api.model import org.json4s.NoTypeHints import org.json4s.jackson.Serialization trait JsonSerializable { import org.json4s.native.JsonMethods._ implicit val formats = Serialization.formats(NoTypeHints) def toJson: String = { val json = Serialization.writePretty(this) pretty(render(parse(json))) } } case class JsonString(string: String) extends JsonSerializable { override def toJson: String = string }
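
// Hedged usage sketch (not part of the original file): any case class can mix
// in JsonSerializable to get pretty-printed JSON via json4s. The Player class
// is hypothetical.
object JsonSerializableExample {
  case class Player(name: String, score: Int) extends JsonSerializable

  def main(args: Array[String]): Unit = {
    // Prints a pretty-rendered object with "name" and "score" fields
    println(Player("alice", 3).toJson)
  }
}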
mratin/kdom
src/main/scala/se/apogo/kdom/api/model/JsonSerializable.scala
Scala
mit
447
/* Copyright 2014 UniCredit S.p.A. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package unicredit.hippo import akka.actor.{ ActorSystem, Props } import akka.io.IO import spray.can.Http import com.typesafe.config.ConfigFactory import net.ceedubs.ficus.FicusConfig._ import actors.{ Client, HttpGate } object Main extends App { private val config = ConfigFactory.load private val host = config.as[String]("http.hostname") private val port = config.as[Int]("http.port") private val servers = config.as[List[String]]("hippo.servers") implicit val system = ActorSystem("hippo-http") val client = system.actorOf( Props(new Client(servers)), name = "client" ) val http = system.actorOf( Props(new HttpGate(client)), name = "http" ) IO(Http) ! Http.Bind(http, interface = host, port = port) }
unicredit/hippodb
http/src/main/scala/unicredit/hippo/Main.scala
Scala
apache-2.0
1,354
package com.sothr.imagetools.engine.util import java.io.{File, FileOutputStream, PrintStream} import java.util.Properties import com.typesafe.config.{Config, ConfigFactory, ConfigRenderOptions} import grizzled.slf4j.Logging /* * Service for loading and interacting with the properties file */ object PropertiesService extends Logging { //OS information val OS = System.getProperty("os.name", "UNKNOWN") val OS_VERSION = System.getProperty("os.version", "UNKNOWN") val OS_ARCH = System.getProperty("os.arch", "UNKNOWN") private val newUserConf: Properties = new Properties() private val configRenderOptions = ConfigRenderOptions.concise().setFormatted(true) //specific highly used properties var TimingEnabled: Boolean = false //ahash var aHashPrecision = 0 var aHashTolerance = 0 var aHashWeight = 0.0f var useAhash = false //dhash var dHashPrecision = 0 var dHashTolerance = 0 var dHashWeight = 0.0f var useDhash = false //phash var pHashPrecision = 0 var pHashTolerance = 0 var pHashWeight = 0.0f var usePhash = false private var defaultConf: Config = null private var userConf: Config = null private var version: Version = null def getVersion: Version = this.version /* * Load the properties file from the specified location */ def loadProperties(defaultLocation: String, userLocation: String = null) = { info(s"Attempting to load properties from: $defaultLocation") defaultConf = ConfigFactory.load(defaultLocation) if (userLocation != null) { userConf = ConfigFactory.parseFile(new File(userLocation)) } else { userConf = ConfigFactory.empty info("No user properties file exists to load from") } version = new Version(get(PropertyEnum.Version.toString)) info(s"Detected Version: $version") //load special properties TimingEnabled = get(PropertyEnum.Timed.toString).toBoolean //ahash aHashPrecision = get(PropertyEnum.AhashPrecision.toString).toInt aHashTolerance = get(PropertyEnum.AhashTolerance.toString).toInt aHashWeight = get(PropertyEnum.AhashWeight.toString).toFloat useAhash = get(PropertyEnum.UseAhash.toString).toBoolean //dhash dHashPrecision = get(PropertyEnum.DhashPrecision.toString).toInt dHashTolerance = get(PropertyEnum.DhashTolerance.toString).toInt dHashWeight = get(PropertyEnum.DhashWeight.toString).toFloat useDhash = get(PropertyEnum.UseDhash.toString).toBoolean //phash pHashPrecision = get(PropertyEnum.PhashPrecision.toString).toInt pHashTolerance = get(PropertyEnum.PhashTolerance.toString).toInt pHashWeight = get(PropertyEnum.PhashWeight.toString).toFloat usePhash = get(PropertyEnum.UsePhash.toString).toBoolean info("Loaded Special Properties") } def get(key: String, defaultValue: String = null): String = { var result: String = defaultValue //check the latest properties if (newUserConf.containsKey(key)) { result = newUserConf.getProperty(key) } //check the loaded user properties else if (userConf.hasPath(key)) { result = userConf.getString(key) } //check the default properties else if (defaultConf.hasPath(key)) { result = defaultConf.getString(key) } result } def saveConf(location: String) = { info(s"Saving user properties to $location") val out: PrintStream = new PrintStream(new FileOutputStream(location, false)) val userConfToSave = getCleanedMergedUserConf //print to the output stream out.print(userConfToSave.root().render(configRenderOptions)) out.flush() out.close() } private def getCleanedMergedUserConf: Config = { ConfigFactory.parseProperties(cleanAndPrepareNewUserProperties()) withFallback userConf } private def cleanAndPrepareNewUserProperties(): Properties = { //insert special keys here 
newUserConf.setProperty(PropertyEnum.PreviousVersion.toString, version.parsableToString()) //remove special keys here newUserConf.remove(PropertyEnum.Version.toString) newUserConf } def has(key: String): Boolean = { var result = false if (newUserConf.containsKey(key) || userConf.hasPath(key) || defaultConf.hasPath(key)) { result = true } result } def set(key: String, value: String) = { newUserConf.setProperty(key, value) } }
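
// Hedged usage sketch (not part of the original file): the typical lifecycle is
// to load the bundled defaults plus an optional user override file, read and set
// keys, then persist the merged user configuration. The file locations and the
// custom key are hypothetical, and loadProperties expects the special keys
// (version, hash settings, ...) to be present in the default config.
//
//   PropertiesService.loadProperties("application.conf", "user.conf")
//   val timed = PropertiesService.get(PropertyEnum.Timed.toString, "false").toBoolean
//   PropertiesService.set("custom.flag", "true")
//   PropertiesService.saveConf("user.conf")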
warricksothr/ImageTools
engine/src/main/scala/com/sothr/imagetools/engine/util/PropertiesService.scala
Scala
mit
4,359
package scalaxy.evidence
package test

import org.scalamock.scalatest.MockFactory
import org.scalatest.{ FlatSpecLike, Matchers }

import scala.reflect.runtime.universe._
import scala.tools.reflect.{ ToolBox, ToolBoxError }

class PersistenceExampleTest extends FlatSpecLike with Matchers with MockFactory {
  behavior of "scalaxy.evidence on java annotations"

  // NOTE: the original excerpt used `toolbox` without defining it; a
  // runtime-universe toolbox is the usual choice and is assumed here.
  private lazy val toolbox = scala.reflect.runtime.currentMirror.mkToolBox()

  val decls = q"""
    import javax.persistence.Entity
    import scalaxy.evidence._
    type IsEntity[T] = HasAnnotation[T, Entity]
    type IsNotDeprecated[T] = ![HasAnnotation[T, Deprecated]]
    def serialize[T : IsEntity : IsNotDeprecated](t: T) = ???
  """

  it should "allow persistence of GoodEntity" in {
    toolbox.compile(q"""
      ..$decls
      @Entity(name = "GoodEntity")
      class GoodEntity
      serialize(new GoodEntity)
    """)
  }

  it should "forbid persistence of DeprecatedEntity" in {
    a [ToolBoxError] should be thrownBy {
      // `..$decls` is spliced here as well so that `Entity` and `serialize`
      // are in scope and the failure comes from the evidence, not from a
      // missing definition.
      toolbox.compile(q"""
        ..$decls
        @Entity(name = "DeprecatedEntity") @Deprecated
        class DeprecatedEntity
        serialize(new DeprecatedEntity)
      """)
    }
  }

  it should "forbid persistence of NotAnEntity" in {
    a [ToolBoxError] should be thrownBy {
      toolbox.compile(q"""
        ..$decls
        class NotAnEntity
        serialize(new NotAnEntity)
      """)
    }
  }
}
nativelibs4java/scalaxy-evidence
src/test/scala/PersistenceExampleTest.scala
Scala
bsd-3-clause
1,297
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.yarn import java.io.IOException import java.net.Socket import java.util.concurrent.CopyOnWriteArrayList import java.util.concurrent.atomic.{AtomicInteger, AtomicReference} import scala.collection.JavaConversions._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.hadoop.net.NetUtils import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.util.ShutdownHookManager import org.apache.hadoop.yarn.api._ import org.apache.hadoop.yarn.api.protocolrecords._ import org.apache.hadoop.yarn.api.records._ import org.apache.hadoop.yarn.client.api.AMRMClient import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest import org.apache.hadoop.yarn.conf.YarnConfiguration import org.apache.hadoop.yarn.ipc.YarnRPC import org.apache.hadoop.yarn.util.{ConverterUtils, Records} import org.apache.spark.{SparkConf, SparkContext, Logging} import org.apache.spark.util.Utils class ApplicationMaster(args: ApplicationMasterArguments, conf: Configuration, sparkConf: SparkConf) extends Logging { def this(args: ApplicationMasterArguments, sparkConf: SparkConf) = this(args, new Configuration(), sparkConf) def this(args: ApplicationMasterArguments) = this(args, new SparkConf()) private val yarnConf: YarnConfiguration = new YarnConfiguration(conf) private var appAttemptId: ApplicationAttemptId = _ private var userThread: Thread = _ private val fs = FileSystem.get(yarnConf) private var yarnAllocator: YarnAllocationHandler = _ private var isFinished: Boolean = false private var uiAddress: String = _ private val maxAppAttempts: Int = conf.getInt( YarnConfiguration.RM_AM_MAX_ATTEMPTS, YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS) private var isLastAMRetry: Boolean = true private var amClient: AMRMClient[ContainerRequest] = _ // Default to numWorkers * 2, with minimum of 3 private val maxNumWorkerFailures = sparkConf.getInt("spark.yarn.max.worker.failures", math.max(args.numWorkers * 2, 3)) def run() { // Setup the directories so things go to YARN approved directories rather // than user specified and /tmp. System.setProperty("spark.local.dir", getLocalDirs()) // set the web ui port to be ephemeral for yarn so we don't conflict with // other spark processes running on the same box System.setProperty("spark.ui.port", "0") // Use priority 30 as it's higher then HDFS. It's same priority as MapReduce is using. 
ShutdownHookManager.get().addShutdownHook(new AppMasterShutdownHook(this), 30) appAttemptId = getApplicationAttemptId() isLastAMRetry = appAttemptId.getAttemptId() >= maxAppAttempts amClient = AMRMClient.createAMRMClient() amClient.init(yarnConf) amClient.start() // Workaround until hadoop moves to something which has // https://issues.apache.org/jira/browse/HADOOP-8406 - fixed in (2.0.2-alpha but no 0.23 line) // org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(conf) ApplicationMaster.register(this) // Start the user's JAR userThread = startUserClass() // This a bit hacky, but we need to wait until the spark.driver.port property has // been set by the Thread executing the user class. waitForSparkContextInitialized() // Do this after Spark master is up and SparkContext is created so that we can register UI Url. val appMasterResponse: RegisterApplicationMasterResponse = registerApplicationMaster() // Allocate all containers allocateWorkers() // Wait for the user class to Finish userThread.join() System.exit(0) } /** Get the Yarn approved local directories. */ private def getLocalDirs(): String = { // Hadoop 0.23 and 2.x have different Environment variable names for the // local dirs, so lets check both. We assume one of the 2 is set. // LOCAL_DIRS => 2.X, YARN_LOCAL_DIRS => 0.23.X val localDirs = Option(System.getenv("YARN_LOCAL_DIRS")) .orElse(Option(System.getenv("LOCAL_DIRS"))) localDirs match { case None => throw new Exception("Yarn Local dirs can't be empty") case Some(l) => l } } private def getApplicationAttemptId(): ApplicationAttemptId = { val envs = System.getenv() val containerIdString = envs.get(ApplicationConstants.Environment.CONTAINER_ID.name()) val containerId = ConverterUtils.toContainerId(containerIdString) val appAttemptId = containerId.getApplicationAttemptId() logInfo("ApplicationAttemptId: " + appAttemptId) appAttemptId } private def registerApplicationMaster(): RegisterApplicationMasterResponse = { logInfo("Registering the ApplicationMaster") amClient.registerApplicationMaster(Utils.localHostName(), 0, uiAddress) } private def startUserClass(): Thread = { logInfo("Starting the user JAR in a separate Thread") val mainMethod = Class.forName( args.userClass, false /* initialize */ , Thread.currentThread.getContextClassLoader).getMethod("main", classOf[Array[String]]) val t = new Thread { override def run() { var successed = false try { // Copy var mainArgs: Array[String] = new Array[String](args.userArgs.size) args.userArgs.copyToArray(mainArgs, 0, args.userArgs.size) mainMethod.invoke(null, mainArgs) // some job script has "System.exit(0)" at the end, for example SparkPi, SparkLR // userThread will stop here unless it has uncaught exception thrown out // It need shutdown hook to set SUCCEEDED successed = true } finally { logDebug("finishing main") isLastAMRetry = true if (successed) { ApplicationMaster.this.finishApplicationMaster(FinalApplicationStatus.SUCCEEDED) } else { ApplicationMaster.this.finishApplicationMaster(FinalApplicationStatus.FAILED) } } } } t.start() t } // This need to happen before allocateWorkers() private def waitForSparkContextInitialized() { logInfo("Waiting for Spark context initialization") try { var sparkContext: SparkContext = null ApplicationMaster.sparkContextRef.synchronized { var numTries = 0 val waitTime = 10000L val maxNumTries = sparkConf.getInt("spark.yarn.applicationMaster.waitTries", 10) while (ApplicationMaster.sparkContextRef.get() == null && numTries < maxNumTries) { logInfo("Waiting for Spark context 
initialization ... " + numTries) numTries = numTries + 1 ApplicationMaster.sparkContextRef.wait(waitTime) } sparkContext = ApplicationMaster.sparkContextRef.get() assert(sparkContext != null || numTries >= maxNumTries) if (sparkContext != null) { uiAddress = sparkContext.ui.appUIAddress this.yarnAllocator = YarnAllocationHandler.newAllocator( yarnConf, amClient, appAttemptId, args, sparkContext.preferredNodeLocationData, sparkContext.getConf) } else { logWarning("Unable to retrieve SparkContext inspite of waiting for %d, maxNumTries = %d". format(numTries * waitTime, maxNumTries)) this.yarnAllocator = YarnAllocationHandler.newAllocator( yarnConf, amClient, appAttemptId, args, sparkContext.getConf) } } } finally { // In case of exceptions, etc - ensure that count is at least ALLOCATOR_LOOP_WAIT_COUNT : // so that the loop (in ApplicationMaster.sparkContextInitialized) breaks. ApplicationMaster.incrementAllocatorLoop(ApplicationMaster.ALLOCATOR_LOOP_WAIT_COUNT) } } private def allocateWorkers() { try { logInfo("Allocating " + args.numWorkers + " workers.") // Wait until all containers have finished // TODO: This is a bit ugly. Can we make it nicer? // TODO: Handle container failure yarnAllocator.addResourceRequests(args.numWorkers) // Exits the loop if the user thread exits. while (yarnAllocator.getNumWorkersRunning < args.numWorkers && userThread.isAlive) { if (yarnAllocator.getNumWorkersFailed >= maxNumWorkerFailures) { finishApplicationMaster(FinalApplicationStatus.FAILED, "max number of worker failures reached") } yarnAllocator.allocateResources() ApplicationMaster.incrementAllocatorLoop(1) Thread.sleep(100) } } finally { // In case of exceptions, etc - ensure that count is at least ALLOCATOR_LOOP_WAIT_COUNT, // so that the loop in ApplicationMaster#sparkContextInitialized() breaks. ApplicationMaster.incrementAllocatorLoop(ApplicationMaster.ALLOCATOR_LOOP_WAIT_COUNT) } logInfo("All workers have launched.") // Launch a progress reporter thread, else the app will get killed after expiration // (def: 10mins) timeout. if (userThread.isAlive) { // Ensure that progress is sent before YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS elapses. val timeoutInterval = yarnConf.getInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 120000) // we want to be reasonably responsive without causing too many requests to RM. val schedulerInterval = sparkConf.getLong("spark.yarn.scheduler.heartbeat.interval-ms", 5000) // must be <= timeoutInterval / 2. val interval = math.min(timeoutInterval / 2, schedulerInterval) launchReporterThread(interval) } } private def launchReporterThread(_sleepTime: Long): Thread = { val sleepTime = if (_sleepTime <= 0) 0 else _sleepTime val t = new Thread { override def run() { while (userThread.isAlive) { if (yarnAllocator.getNumWorkersFailed >= maxNumWorkerFailures) { finishApplicationMaster(FinalApplicationStatus.FAILED, "max number of worker failures reached") } val missingWorkerCount = args.numWorkers - yarnAllocator.getNumWorkersRunning - yarnAllocator.getNumPendingAllocate if (missingWorkerCount > 0) { logInfo("Allocating %d containers to make up for (potentially) lost containers". format(missingWorkerCount)) yarnAllocator.addResourceRequests(missingWorkerCount) } sendProgress() Thread.sleep(sleepTime) } } } // Setting to daemon status, though this is usually not a good idea. 
    t.setDaemon(true)
    t.start()
    logInfo("Started progress reporter thread - sleep time : " + sleepTime)
    t
  }

  private def sendProgress() {
    logDebug("Sending progress")
    // Simulated with an allocate request with no nodes requested.
    yarnAllocator.allocateResources()
  }

  /*
  def printContainers(containers: List[Container]) = {
    for (container <- containers) {
      logInfo("Launching shell command on a new container."
        + ", containerId=" + container.getId()
        + ", containerNode=" + container.getNodeId().getHost()
        + ":" + container.getNodeId().getPort()
        + ", containerNodeURI=" + container.getNodeHttpAddress()
        + ", containerState" + container.getState()
        + ", containerResourceMemory" + container.getResource().getMemory())
    }
  }
  */

  def finishApplicationMaster(status: FinalApplicationStatus, diagnostics: String = "") {
    synchronized {
      if (isFinished) {
        return
      }
      isFinished = true
    }

    logInfo("finishApplicationMaster with " + status)
    // Set tracking URL to empty since we don't have a history server.
    amClient.unregisterApplicationMaster(status, "" /* appMessage */ , "" /* appTrackingUrl */)
  }

  /**
   * Clean up the staging directory.
   */
  private def cleanupStagingDir() {
    var stagingDirPath: Path = null
    try {
      val preserveFiles = sparkConf.get("spark.yarn.preserve.staging.files", "false").toBoolean
      if (!preserveFiles) {
        // Check the environment variable before constructing a Path: new Path(null)
        // throws, so the old null check on stagingDirPath could never fire.
        val stagingDir = System.getenv("SPARK_YARN_STAGING_DIR")
        if (stagingDir == null) {
          logError("Staging directory is null")
          return
        }
        stagingDirPath = new Path(stagingDir)
        logInfo("Deleting staging directory " + stagingDirPath)
        fs.delete(stagingDirPath, true)
      }
    } catch {
      case ioe: IOException =>
        logError("Failed to cleanup staging dir " + stagingDirPath, ioe)
    }
  }

  // The shutdown hook that runs when a signal is received AND during normal close of the JVM.
  class AppMasterShutdownHook(appMaster: ApplicationMaster) extends Runnable {

    def run() {
      logInfo("AppMaster received a signal.")
      // We need to clean up the staging dir before HDFS is shut down,
      // and make sure we don't delete it until this is the last AM.
      if (appMaster.isLastAMRetry) appMaster.cleanupStagingDir()
    }
  }
}

object ApplicationMaster {
  // Number of times to wait for the allocator loop to complete.
  // Each loop iteration waits for 100ms, so a maximum of 3 seconds total.
  // This is to ensure that we have a reasonable number of containers before we start.
  // TODO: Currently, the task-to-container assignment is computed once (TaskSetManager) -
  // which need not be optimal as more containers become available. Might need to handle this better.
  private val ALLOCATOR_LOOP_WAIT_COUNT = 30

  private val applicationMasters = new CopyOnWriteArrayList[ApplicationMaster]()

  val sparkContextRef: AtomicReference[SparkContext] =
    new AtomicReference[SparkContext](null /* initialValue */)

  val yarnAllocatorLoop: AtomicInteger = new AtomicInteger(0)

  def incrementAllocatorLoop(by: Int) {
    val count = yarnAllocatorLoop.getAndAdd(by)
    if (count >= ALLOCATOR_LOOP_WAIT_COUNT) {
      yarnAllocatorLoop.synchronized {
        // to wake threads off wait ...
        yarnAllocatorLoop.notifyAll()
      }
    }
  }

  def register(master: ApplicationMaster) {
    applicationMasters.add(master)
  }

  // TODO(harvey): See whether this should be discarded - it isn't used anywhere atm...
  def sparkContextInitialized(sc: SparkContext): Boolean = {
    var modified = false
    sparkContextRef.synchronized {
      modified = sparkContextRef.compareAndSet(null, sc)
      sparkContextRef.notifyAll()
    }

    // Add a shutdown hook - as a best-effort measure in case users do not call sc.stop or do
    // System.exit.
    // Should not really have to do this, but it helps YARN to evict resources earlier.
    // Not to mention, prevent the Client from declaring failure even though we exited properly.
    // Note that this will unfortunately not properly clean up the staging files because it gets
    // called too late, after the filesystem is already shut down.
    if (modified) {
      Runtime.getRuntime().addShutdownHook(new Thread with Logging {
        // This not only logs, but also ensures that the log system is initialized for this
        // instance when we are actually 'run'-ing.
        logInfo("Adding shutdown hook for context " + sc)

        override def run() {
          logInfo("Invoking sc stop from shutdown hook")
          sc.stop()
          // Best case ...
          for (master <- applicationMasters) {
            master.finishApplicationMaster(FinalApplicationStatus.SUCCEEDED)
          }
        }
      })
    }

    // Wait for initialization to complete and for at least 'some' nodes to be allocated.
    yarnAllocatorLoop.synchronized {
      while (yarnAllocatorLoop.get() <= ALLOCATOR_LOOP_WAIT_COUNT) {
        yarnAllocatorLoop.wait(1000L)
      }
    }
    modified
  }

  def main(argStrings: Array[String]) {
    val args = new ApplicationMasterArguments(argStrings)
    new ApplicationMaster(args).run()
  }
}
dotunolafunmiloye/spark
yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
Scala
apache-2.0
16,647
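The trickiest part of the ApplicationMaster above is the handshake between incrementAllocatorLoop (bump an AtomicInteger and notifyAll once it crosses ALLOCATOR_LOOP_WAIT_COUNT) and the waiter in sparkContextInitialized. Below is a minimal, self-contained sketch of that same pattern; the object name, threshold, and demo wiring are illustrative assumptions, not Spark code.

import java.util.concurrent.atomic.AtomicInteger

// Minimal sketch of the counter/notifyAll handshake used by ApplicationMaster.
// WAIT_COUNT and the demo main are assumptions for illustration only.
object AllocatorHandshakeSketch {
  private val WAIT_COUNT = 30
  private val loopCounter = new AtomicInteger(0)

  // Called by the allocating thread once per allocation round.
  def increment(by: Int): Unit = {
    val before = loopCounter.getAndAdd(by)
    if (before >= WAIT_COUNT) {
      // Wake any thread blocked in awaitEnoughRounds().
      loopCounter.synchronized { loopCounter.notifyAll() }
    }
  }

  // Called by the thread that must not proceed until enough rounds have run.
  def awaitEnoughRounds(): Unit = loopCounter.synchronized {
    while (loopCounter.get() <= WAIT_COUNT) {
      loopCounter.wait(1000L) // bounded wait guards against a missed notify
    }
  }

  def main(args: Array[String]): Unit = {
    val waiter = new Thread(new Runnable {
      def run(): Unit = { awaitEnoughRounds(); println("released") }
    })
    waiter.start()
    (1 to 31).foreach(_ => increment(1)) // the 31st bump crosses the threshold
    waiter.join()
  }
}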
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by ylu on 9/26/16.
  */

object spark_join {

  var lineitem = ""
  var orders = ""

  val stringLineitem = "l_orderkey long, l_partkey int, l_suppkey int, l_linenumber int, l_quantity double, l_extendedprice double, l_discount double, l_tax double, l_returnflag string, l_linestatus string, l_shipdate date, l_commitdate date, l_receiptdate date, l_shipinstruct string, l_shipmode string"
  val stringOrders = "o_orderkey long, o_custkey long, o_orderstatus string, o_totalprice double, o_orderdate date, o_orderpriority string, o_clerk string, o_shippriority int"

  val schemaLineitem = Schema.createSchema(stringLineitem)
  val schemaOrders = Schema.createSchema(stringOrders)

  def joinTable(sc: SparkContext): Unit = {
    val start = System.currentTimeMillis()

    val table1Rdd = sc.textFile(lineitem)
    val table2Rdd = sc.textFile(orders)

    val key1 = schemaLineitem.getAttributeId("l_orderkey")
    val key2 = schemaOrders.getAttributeId("o_orderkey")

    val table1RddWithKeys = table1Rdd.map { x =>
      val key = x.split(Global.SPLIT_DELIMITER)(key1)
      (key, x)
    }
    val table2RddWithKeys = table2Rdd.map { x =>
      val key = x.split(Global.SPLIT_DELIMITER)(key2)
      (key, x)
    }

    val resultRdd = table1RddWithKeys.join(table2RddWithKeys, 800).map { x =>
      x._2._1 + Global.DELIMITER + x._2._2
    }
    val result = resultRdd.count()

    val end = System.currentTimeMillis()
    println("RES: Time Taken: " + (end - start) + " Result: " + result)
  }

  def main(args: Array[String]) {
    val conf = new SparkConf()
    conf.setAppName("spark_partitioner")
    conf.setMaster("spark://istc1.csail.mit.edu:7077")
    val sc = new SparkContext(conf)

    lineitem = args(0)
    orders = args(1)

    joinTable(sc)
  }
}
mitdbg/AdaptDB
spark-partitioner/src/main/scala/spark_join.scala
Scala
mit
2,027
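Since the join above only runs against a live cluster, here is a Spark-free sketch of the same key-extract-then-join semantics on plain collections. The '|' delimiter and the toy rows are assumptions standing in for Global.SPLIT_DELIMITER and the real TPC-H inputs.

// Spark-free sketch of the key extraction and inner join performed above.
object JoinSketch {
  def main(args: Array[String]): Unit = {
    val lineitem = Seq("1|apple", "1|pear", "2|plum") // l_orderkey at index 0
    val orders = Seq("1|alice", "2|bob")              // o_orderkey at index 0

    // Mirrors tableRdd.map(x => (key, x)): pull the join key out of each row.
    def keyed(rows: Seq[String], keyIdx: Int): Seq[(String, String)] =
      rows.map { row => (row.split("\\|")(keyIdx), row) }

    // Equivalent of table1RddWithKeys.join(table2RddWithKeys): inner join on
    // key. toMap assumes order keys are unique, as they are in TPC-H orders.
    val ordersByKey = keyed(orders, 0).toMap
    val joined = for {
      (k, left) <- keyed(lineitem, 0)
      right <- ordersByKey.get(k)
    } yield left + "|" + right

    joined.foreach(println) // 3 rows, matching what resultRdd.count() would report
  }
}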
package controllers import java.util.UUID import com.google.inject.AbstractModule import com.mohiva.play.silhouette.api.{ Environment, LoginInfo } import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator import com.mohiva.play.silhouette.test._ import models.User import net.codingwell.scalaguice.ScalaModule import org.specs2.mock.Mockito import org.specs2.specification.Scope import play.api.inject.guice.GuiceApplicationBuilder import play.api.libs.concurrent.Execution.Implicits._ import play.api.test.{ FakeRequest, PlaySpecification, WithApplication } /** * Test case for the [[controllers.ApplicationController]] class. */ class ApplicationControllerSpec extends PlaySpecification with Mockito { sequential "The `index` action" should { "redirect to login page if user is unauthorized" in new Context { new WithApplication(application) { val Some(redirectResult) = route(FakeRequest(routes.ApplicationController.index()) .withAuthenticator[CookieAuthenticator](LoginInfo("invalid", "invalid")) ) status(redirectResult) must be equalTo SEE_OTHER val redirectURL = redirectLocation(redirectResult).getOrElse("") redirectURL must contain(routes.ApplicationController.signIn().toString()) val Some(unauthorizedResult) = route(FakeRequest(GET, redirectURL)) status(unauthorizedResult) must be equalTo OK contentType(unauthorizedResult) must beSome("text/html") contentAsString(unauthorizedResult) must contain("Silhouette - Sign In") } } "return 200 if user is authorized" in new Context { new WithApplication(application) { val Some(result) = route(FakeRequest(routes.ApplicationController.index()) .withAuthenticator[CookieAuthenticator](identity.loginInfo) ) status(result) must beEqualTo(OK) } } } /** * The context. */ trait Context extends Scope { /** * A fake Guice module. */ class FakeModule extends AbstractModule with ScalaModule { def configure() = { bind[Environment[User, CookieAuthenticator]].toInstance(env) } } /** * An identity. */ val identity = User( userID = UUID.randomUUID(), loginInfo = LoginInfo("facebook", "[email protected]"), firstName = None, lastName = None, fullName = None, email = None, avatarURL = None ) /** * A Silhouette fake environment. */ implicit val env: Environment[User, CookieAuthenticator] = new FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity)) /** * The application. */ lazy val application = new GuiceApplicationBuilder() .overrides(new FakeModule) .build() } }
renexdev/Silhouette-Slick-Seed-pg-jwt
test/controllers/ApplicationControllerSpec.scala
Scala
apache-2.0
2,806
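One more case the fixtures above can cover, sketched as a fragment meant to sit alongside the two examples inside the `should` block. It assumes the seed's unauthorized handler treats a missing authenticator cookie the same way as an invalid one; only APIs already used in the spec appear here.

    "redirect to login page if no authenticator is present" in new Context {
      new WithApplication(application) {
        // No .withAuthenticator call: the request carries no cookie at all.
        val Some(result) = route(FakeRequest(routes.ApplicationController.index()))

        status(result) must be equalTo SEE_OTHER
        redirectLocation(result).getOrElse("") must contain(
          routes.ApplicationController.signIn().toString())
      }
    }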
package org.jetbrains.plugins.scala.lang.psi.api.base import org.jetbrains.plugins.scala.lang.psi.api.ScalaPsiElement import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil /** Generic infix operation, covers infix types, patterns, and expressions. * * @author Clément Fournier */ trait ScInfixElement extends ScalaPsiElement { //expression, type element or pattern type Kind <: ScalaPsiElement type Reference <: ScReference def left: Kind def operation: Reference def rightOption: Option[Kind] def isRightAssoc: Boolean = ScalaNamesUtil.clean(operation.refName).endsWith(":") def isLeftAssoc: Boolean = !isRightAssoc } object ScInfixElement { def unapply(arg: ScInfixElement): Option[(arg.Kind, arg.Reference, Option[arg.Kind])] = Some((arg.left, arg.operation, arg.rightOption)) }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScInfixElement.scala
Scala
apache-2.0
839
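isRightAssoc encodes Scala's own grammar rule: an operator whose (cleaned) name ends in ':' binds to the right. A standalone illustration of just that rule, with no IntelliJ APIs; the object name is an assumption.

// Same test ScInfixElement performs after ScalaNamesUtil.clean strips backticks.
object AssocRuleSketch {
  def isRightAssoc(opName: String): Boolean = opName.endsWith(":")

  def main(args: Array[String]): Unit = {
    println(isRightAssoc("::")) // true:  1 :: 2 :: Nil parses as 1 :: (2 :: Nil)
    println(isRightAssoc("+:")) // true:  right-associative prepend
    println(isRightAssoc("+"))  // false: a + b + c parses as (a + b) + c
  }
}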
package com.alanjz.microstrike

import java.awt.Point
import java.awt.geom.Point2D

import com.alanjz.microstrike.team.Team

class Player(val name: String, val team: Team) {
  private val _point = new Point2D.Double()
  private var _health = 0
  private var _armor = 0

  def point = new Point(x, y)
  def health = _health
  def armor = _armor
  def isAlive = health > 0
  def x: Int = _point.x.toInt
  def y: Int = _point.y.toInt
  def moveSpeed: Double = 2.7

  def setX(x: Double) = _point.setLocation(x, _point.y)
  def setY(y: Double) = _point.setLocation(_point.x, y)
  def setPoint(x: Double, y: Double) = _point.setLocation(x, y)
  def setHealth(health: Int) = _health = health
  def setArmor(armor: Int) = _armor = armor

  // Move using the full-precision coordinates (_point.x/_point.y) rather than
  // the Int-truncated x/y accessors; otherwise each 2.7-unit step would be
  // snapped to a whole pixel first, and opposite moves would not cancel out.
  def moveLeft(): Unit = { setX(_point.x - moveSpeed) }
  def moveForward(): Unit = { setY(_point.y - moveSpeed) }
  def moveBackward(): Unit = { setY(_point.y + moveSpeed) }
  def moveRight(): Unit = { setX(_point.x + moveSpeed) }
}
spacenut/microstrike
src/com/alanjz/microstrike/Player.scala
Scala
gpl-2.0
972
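A short usage sketch for Player. Passing null for the Team argument is an illustration-only shortcut, since Team's constructor isn't shown here; note that opposite moves now cancel out instead of drifting by a truncated pixel per step.

object PlayerSketch {
  def main(args: Array[String]): Unit = {
    val p = new Player("smoke", null) // null Team: illustration only
    p.setPoint(10.0, 10.0)
    p.setHealth(100)

    p.moveRight() // +2.7 on the full-precision x coordinate
    p.moveLeft()  // -2.7: back to (approximately) the starting point
    println(p.point)   // ~ java.awt.Point[x=10,y=10], no whole-pixel drift
    println(p.isAlive) // true
  }
}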
package edu.mit.cryptdb import java.io.{ File, PrintWriter } import scala.collection.mutable.{ ArrayBuffer, HashSet, HashMap, Seq => MutSeq } // these are the onions which a plan cost has to // be concerned about (b/c it does seq scans over these relations), // except it does NOT use these onions (so it is the non-constant // part of the cost function) case class PlanExtraCosts( regExtraOnions: Map[String, Map[String, Int]], precomputedExtraOnions: Map[String, Map[String, Int]]) package object math_defns { type ImmVector[A] = scala.collection.immutable.Vector[A] type ImmMatrix[A] = ImmVector[ImmVector[A]] type MutVector[A] = scala.collection.mutable.Seq[A] type MutMatrix[A] = MutVector[MutVector[A]] def toImmVector[A](v: MutVector[A]): ImmVector[A] = { scala.collection.immutable.Vector(v:_*) } def toImmMatrix[A](m: MutMatrix[A]): ImmMatrix[A] = { scala.collection.immutable.Vector(m.map(toImmVector(_)):_*) } def validate[A](m: ImmMatrix[A]): Boolean = { // validate matrix is well-formed m.headOption.map { h => val s = h.size m.filter(_.size != s).isEmpty }.getOrElse(false) } def is_square[A](m: ImmMatrix[A]): Boolean = { assert(validate(m)) if (m.isEmpty) true else m.size == m.head.size } def dimensions[A](m: ImmMatrix[A]): (Int, Int) = { assert(validate(m)) if (m.isEmpty) ((0, 0)) else ((m.size, m.head.size)) } def matrixAsMCode[A](m: ImmMatrix[A]): String = { m.map(v => vectorAsMCode(v, false)).mkString("[", ";", "]") } def vectorAsMCode[A](v: ImmVector[A], c: Boolean): String = { v.map(_.toString).mkString("[", (if (c) ";" else ","), "]") } } import math_defns._ // an instance of a binary-integer-linear-program // // this program is the following optimization problem: // // min cx // subject to: // A x <= b // Aeq x = beq // x is binary case class BILPInstance( c: ImmVector[Double], ieq_constraint: Option[ (ImmMatrix[Double], ImmVector[Double]) ], eq_constraint: Option[ (ImmMatrix[Double], ImmVector[Double]) ]) extends Timer { private val n = c.size private def validateConstraint[A](matrix: ImmMatrix[A], vector: ImmVector[A]) = { val (m, n0) = dimensions(matrix) assert(n == n0) assert(m == vector.size) } ieq_constraint.foreach { case (a, b) => validateConstraint(a, b) } eq_constraint.foreach { case (a, b) => validateConstraint(a, b) } def toCPLEXInput: String = { val buf = new StringBuilder buf.append("Minimize\\n") def vecToString(v: ImmVector[Double]): String = { v.zipWithIndex.flatMap { case (d, idx) => if (d != 0.0) { val s = if (d > 0.0) "+" else "-" Seq("%s %f x%d".format(s, math.abs(d), idx)) } else Seq.empty }.mkString(" ") } buf.append("R0: " + vecToString(c) + "\\n") buf.append("Subject To\\n") ieq_constraint.foreach { case (mat, vec) => val (n, _) = dimensions(mat) (0 until n).foreach { r => buf.append("R%d: ".format(r+1) + vecToString(mat(r)) + " <= " + vec(r) + "\\n") } } eq_constraint.foreach { case (mat, vec) => val (n, _) = dimensions(mat) val offset = ieq_constraint.map(x => dimensions(x._1)._1).getOrElse(0) (0 until n).foreach { r => buf.append("R%d: ".format(r+1+offset) + vecToString(mat(r)) + " = " + vec(r) + "\\n") } } buf.append("Bounds\\n") (0 until n).foreach { r => buf.append("0 <= x%d <= 1\\n".format(r)) } buf.append("Generals\\n") (0 until n).foreach { r => buf.append("x%d\\n".format(r)) } buf.append("end\\n") buf.toString } def toLPSolveInput: String = { val buf = new StringBuilder def vecToString(v: ImmVector[Double], omitZero: Boolean): String = { v.zipWithIndex.flatMap { case (d, idx) => if (!omitZero || d != 0.0) Seq("%f x%d".format(d, idx)) else Seq.empty 
}.mkString(" ") } buf.append("min: " + vecToString(c, true) + ";\\n") ieq_constraint.foreach { case (mat, vec) => val (n, _) = dimensions(mat) (0 until n).foreach { r => buf.append(vecToString(mat(r), true) + " <= " + vec(r) + ";\\n") } } eq_constraint.foreach { case (mat, vec) => val (n, _) = dimensions(mat) (0 until n).foreach { r => buf.append(vecToString(mat(r), true) + " = " + vec(r) + ";\\n") } } buf.append("bin " + (0 until n).map(x => "x%d".format(x)).mkString(", ") + ";\\n") buf.toString } def toMCode: String = { val buf = new StringBuilder def writeMatrix[A](name: String, m: ImmMatrix[A]) = { buf.append(name) buf.append(" = ") buf.append(matrixAsMCode(m)) buf.append(";\\n") } def writeVector[A](name: String, v: ImmVector[A], c: Boolean) = { buf.append(name) buf.append(" = ") buf.append(vectorAsMCode(v, c)) buf.append(";\\n") } writeVector("c", c, true) ieq_constraint match { case Some((a, b)) => writeMatrix("A", a) writeVector("b", b, true) case None => buf.append("A = [];\\n") buf.append("b = [];\\n") } eq_constraint match { case Some((aeq, beq)) => writeMatrix("Aeq", aeq) writeVector("beq", beq, true) case None => buf.append("Aeq = [];\\n") buf.append("beq = [];\\n") } buf.append("[x,fval,exitflag,output] = ") buf.append("bintprog(c, A, b, Aeq, beq);\\n".format(n)) buf.toString } private def solveUsingLPSolve(): Option[(Double, ImmVector[Boolean])] = { // using lp_solve in java is a PITA. // just shell out for now. // TODO: this is so hacky val tmpFile = File.createTempFile("cdbopt", "lp") val code = toLPSolveInput val writer = new PrintWriter(tmpFile) writer.print(code) writer.flush val lp_solve_prog = Option(System.getenv("LP_SOLVE")).filterNot(_.isEmpty).getOrElse("lp_solve") val output = ProcUtils.execCommandWithResults("%s %s".format(lp_solve_prog, tmpFile.getPath)) val objtext = output.filter(_.startsWith("Value of objective function:")) assert(objtext.size == 1) val obj = objtext.head.split(":")(1).toDouble val vtext = output.filter(_.startsWith("x")) assert(vtext.size == n) // lp_solve seems to give the variables back in sorted order, // so this is ok... Some( (obj, Vector( vtext.map(_.split("\\\\s+")(1) == "1") : _* )) ) } private def solveUsingGLPK(): Option[(Double, ImmVector[Boolean])] = { val tmpFile = File.createTempFile("cdbopt", "lp") val code = toCPLEXInput val writer = new PrintWriter(tmpFile) writer.print(code) writer.flush val glpsol_prog = Option(System.getenv("GLPSOL_PROGRAM")).filterNot(_.isEmpty).getOrElse("glpsol") val (execTimeMillis, output) = timedRunMillis( ProcUtils.execCommandWithResults( "%s --lp %s --output /dev/stdout %s".format(glpsol_prog, tmpFile.getPath, GLPKOpts))) println("Solved LP using GLPK in %f ms".format(execTimeMillis)) val objtext = output.filter(_.startsWith("Objective:")) assert(objtext.size == 1) val obj = objtext.head.split("\\\\s+")(3).toDouble val VarRegex = "x\\\\d+".r val values = output.flatMap { l => // hacky for now... 
val toks = l.trim.split("\\\\s+") if (toks.size >= 2) { toks(1) match { case VarRegex() => assert(toks(2) == "*") Seq(toks(3) == "1") case _ => Seq.empty } } else Seq.empty } assert(values.size == n) Some( (obj, Vector( values : _* )) ) } protected val GLPKOpts = "--cuts --pcost" protected val UseGLPK = true def solve(): Option[(Double, ImmVector[Boolean])] = { if (UseGLPK) { solveUsingGLPK() } else { solveUsingLPSolve() } } } // an instance of a binary-integer-quadratic-program // // this program is the following optimization problem: // // min 0.5x'Qx + cx // subject to: // A x <= b // Aeq x = beq // x is binary case class BIQPInstance( Q: ImmMatrix[Double], c: ImmVector[Double], ieq_constraint: Option[ (ImmMatrix[Double], ImmVector[Double]) ], eq_constraint: Option[ (ImmMatrix[Double], ImmVector[Double]) ]) { assert(is_square(Q)) assert(Q.size == c.size) private val n = Q.size private def validateConstraint[A](matrix: ImmMatrix[A], vector: ImmVector[A]) = { val (m, n0) = dimensions(matrix) assert(n == n0) assert(m == vector.size) } ieq_constraint.foreach { case (a, b) => validateConstraint(a, b) } eq_constraint.foreach { case (a, b) => validateConstraint(a, b) } def toMCode: String = { val buf = new StringBuilder def writeMatrix[A](name: String, m: ImmMatrix[A]) = { buf.append(name) buf.append(" = ") buf.append(matrixAsMCode(m)) buf.append(";\\n") } def writeVector[A](name: String, v: ImmVector[A], c: Boolean) = { buf.append(name) buf.append(" = ") buf.append(vectorAsMCode(v, c)) buf.append(";\\n") } writeMatrix("Q", Q) writeVector("c", c, true) ieq_constraint match { case Some((a, b)) => writeMatrix("A", a) writeVector("b", b, true) case None => buf.append("A = [];\\n") buf.append("b = [];\\n") } eq_constraint match { case Some((aeq, beq)) => writeMatrix("Aeq", aeq) writeVector("beq", beq, true) case None => buf.append("Aeq = [];\\n") buf.append("beq = [];\\n") } buf.append("Options = struct('method', 'breadth', 'maxQPiter', 100000);\\n") buf.append("[xmin, fmin, flag, Extendedflag] = ") buf.append("miqp(Q, c, A, b, Aeq, beq, [1:%d], [], [], [], Options);\\n".format(n)) buf.toString } def solve(): Option[(Double, ImmVector[Boolean])] = { throw new RuntimeException("UNIMPL") } // ignore the higher order terms of this program def reduceToBILP: BILPInstance = BILPInstance(c, ieq_constraint, eq_constraint) } /** estimates in bytes */ trait SpaceEstimator { def plaintextSpaceEstimate( defns: Definitions, stats: Statistics): Long = { defns.defns.foldLeft(0L) { case (sizeSoFar, (tbl, cols)) => val nRowsTable = stats.stats(tbl).row_count cols.foldLeft(sizeSoFar) { case (acc, TableColumn(_, tpe, _)) => val regColSize = tpe.size acc + regColSize * nRowsTable } } } def encryptedSpaceEstimate( defns: Definitions, stats: Statistics, plans: Seq[EstimateContext]): Long = { // assumes queries is not empty val globalOpts = plans.head.globalOpts val usedReg = new HashMap[String, HashMap[String, Int]] val usedPre = new HashMap[String, HashMap[String, Int]] val usedHom = new HashMap[String, HashSet[Int]] plans.foreach { ectx => mergeInto(usedReg, ectx.requiredOnions) mergeInto(usedPre, ectx.precomputed) ectx.homGroups.foreach { case (reln, gids) => usedHom.getOrElseUpdate(reln, HashSet.empty) ++= gids } } var sum = 0L defns.defns.foreach { case (tbl, cols) => val nRowsTable = stats.stats(tbl).row_count cols.foreach { case TableColumn(name, tpe, _) => usedReg.get(tbl).flatMap(_.get(name)) match { case Some(onions) => Onions.toSeq(onions).foreach { onion => sum += nRowsTable * encColSize(tpe.size, onion) } 
case None => // exists in DET sum += nRowsTable * tpe.size } } } usedPre.foreach { case (tbl, m) => val nRowsTable = stats.stats(tbl).row_count m.values.foreach { onions => Onions.toSeq(onions).foreach { o => sum += nRowsTable * encColSize(4, o) } } } usedHom.foreach { case (tbl, gids) => val nRowsTable = stats.stats(tbl).row_count gids.foreach { gid => sum += homAggSize(globalOpts, tbl, gid, nRowsTable) } } sum } protected def encColSize(regColSize: Int, o: Int): Long = { o match { case Onions.DET => regColSize case Onions.OPE => regColSize * 2 case Onions.SWP => regColSize * 3 // estimate only case _ => throw new RuntimeException("Unhandled onion: " + o); } } protected def homAggSize( globalOpts: GlobalOpts, reln: String, gid: Int, nRows: Long): Long = { val nExprs = globalOpts.homGroups(reln)(gid).size val nRowsPerHomAgg = math.ceil(12.0 / nExprs.toDouble).toLong val nAggs = math.ceil(nRows.toDouble / nRowsPerHomAgg.toDouble).toLong val nBitsPerAgg = 83 // TODO: this is hardcoded in our system in various places val bytesPerAgg = math.max( nExprs.toDouble * nRowsPerHomAgg.toDouble * nBitsPerAgg.toDouble * 2.0 / 8.0, 256.0).toLong //println( // "homGroup(%s,%d): nExprs = %d, nRowsPerHomAgg = %d, bytesPerAgg = %d, nAggs=%d, totalSize=%s".format( // reln, gid, nExprs, nRowsPerHomAgg, bytesPerAgg, nAggs, (nAggs * bytesPerAgg).toString)) nAggs * bytesPerAgg } protected def mergeInto(dest: HashMap[String, HashMap[String, Int]], src: Map[String, Map[String, Int]]) = { src.foreach { case (k, v) => val m = dest.getOrElseUpdate(k, new HashMap[String, Int]) v.foreach { case (k, v) => m.put(k, m.getOrElse(k, 0) | v) } } } } trait Formulator extends SpaceEstimator { val SpaceFactor = 10.0 // default val SpaceConstraintScaleFactor = 100000000.0 // scales bytes by this amount val ObjectiveFunctionScaleFactor = 1000000.0 // scales cost by this amount protected def outputDebugSolnInfo( defns: Definitions, stats: Statistics, simpleSoln: Seq[PlanNode], optSoln: Seq[(PlanNode, EstimateContext)]): Unit = { optSoln.map(_._1).zip(simpleSoln).zipWithIndex.foreach { case ((a, b), idx) if a != b => println("Query %d differs in choice between opt and simple solution".format(idx)) case _ => } val origSpace = plaintextSpaceEstimate(defns, stats) val encSpace = encryptedSpaceEstimate(defns, stats, optSoln.map(_._2)) println("origSpace=(%f MB), encSpace=(%f MB), overhead=(%f)".format( origSpace.toDouble / (1 << 20).toDouble, encSpace.toDouble / (1 << 20).toDouble, encSpace.toDouble / origSpace.toDouble)) } // greedly handle space constraints def optimizeGreedy( defns: Definitions, stats: Statistics, queries0: Seq[ Seq[(PlanNode, EstimateContext, Estimate)] ]): Seq[PlanNode] = { val globalOpts = queries0.head.head._2.globalOpts val queries = queries0.map(_.sortBy(_._3.cost)) val origSpace = plaintextSpaceEstimate(defns, stats) val budgetSpace = SpaceFactor * origSpace val simpleSoln = queries.map(_.head._1) // code reuse is good val progInstance = formulateIntegerProgram(defns, stats, queries) // the globalXAssignReg and globalXAssignHom maps contain the only knobs we can // flip on/off. set all the knobs on at first val knobSizes = new HashMap[Int, Long] progInstance.globalXAssignReg.foreach { case (reln, m) => val nRowsTable = stats.stats(reln).row_count m.foreach { case ((name, onion), pos) => // lookup as a table column. 
if not found, then assume // it's a precomputed value assert(Onions.isSingleRowEncOnion(onion)) defns.lookup(reln, name) match { case Some(TableColumn(_, tpe, _)) => knobSizes(pos) = nRowsTable * encColSize(tpe.size, onion) case None => knobSizes(pos) = nRowsTable * encColSize(4, onion) } } } progInstance.globalXAssignHom.foreach { case (reln, m) => val nRowsTable = stats.stats(reln).row_count m.foreach { case (gid, pos) => knobSizes(pos) = homAggSize(globalOpts, reln, gid, nRowsTable) } } val knobState = new HashSet[Int] progInstance.globalXAssignReg.foreach { case (_, m) => knobState ++= m.values } progInstance.globalXAssignHom.foreach { case (_, m) => knobState ++= m.values } val tester = queries // set the initial solution to be the cheapest var solnSoFar = tester.map(_.head) def avgScaledCost(ests: Seq[Estimate]): Double = { ests.map(_.cost).sum / (ests.size.toDouble * ObjectiveFunctionScaleFactor) } def geoMeanScaledCost(ests: Seq[Estimate]): Double = { val prod = ests.map(_.cost).reduceLeft(_*_) math.pow(prod, 1.0 / ests.size.toDouble) / ObjectiveFunctionScaleFactor } while (true) { val encSpace = encryptedSpaceEstimate(defns, stats, solnSoFar.map(_._2)) println("test solution: avgScaledCost=(%f), geoMeanScaledCost=(%f), encSpace=(%f MB), budgetSpace=(%f MB)".format( avgScaledCost(solnSoFar.map(_._3)), geoMeanScaledCost(solnSoFar.map(_._3)), encSpace.toDouble / (1 << 20).toDouble, budgetSpace.toDouble / (1 << 20).toDouble)) if (encSpace <= budgetSpace) { // done outputDebugSolnInfo(defns, stats, simpleSoln, solnSoFar.map(x => (x._1, x._2))) return solnSoFar.map(_._1) } // find the largest onion, turn it off, and then greedily pick the best solution // TODO: currently impl is dumb, does linear scan to find the cheapest of the // remaining knobs. 
if (knobState.isEmpty) { throw new RuntimeException("Infeasible") } val (_, knobSize) = knobState.toSeq.map(x => (x, knobSizes(x))) max Ordering[Long].on[(_,Long)](_._2) // find all knobs of the same size val candidateKnobs = knobState.filter(x => knobSizes(x) == knobSize) // for each candidate knob, flip it off, find best plan, and cost def doesPlanQualify(knobState: Set[Int], ectx: EstimateContext): Boolean = { (ectx.requiredOnions.toSeq ++ ectx.precomputed.toSeq).foreach { case (reln, cols) => val gTable = progInstance.globalXAssignReg.getOrElse(reln, Map.empty) cols.foreach { case (name, onions) => Onions.toSeq(onions).foreach { o => gTable.get((name, o)).foreach { id => if (!knobState.contains(id)) return false } } } } ectx.homGroups.foreach { case (reln, gids) => val gTable = progInstance.globalXAssignHom(reln) gids.foreach { gid => if (!knobState.contains(gTable(gid))) return false } } true } // now compute a new solution, based on this new knobset // assumes plans is sorted in order of cost def bestPlanForQuery( knobState: Set[Int], plans: Seq[(PlanNode, EstimateContext, Estimate)]): Option[(PlanNode, EstimateContext, Estimate)] = { plans.foldLeft( None : Option[(PlanNode, EstimateContext, Estimate)] ) { case (acc, (p, ectx, est)) => acc.orElse(if (doesPlanQualify(knobState, ectx)) Some((p, ectx, est)) else None) } } val tests: Seq[(Int, Seq[(PlanNode, EstimateContext, Estimate)])] = candidateKnobs.toSeq.flatMap { x => val testKnobState = knobState.toSet -- Set(x) CollectionUtils.optSeq( tester.map(x => bestPlanForQuery(testKnobState, x))).map(s => (x, s)) } if (tests.isEmpty) { throw new RuntimeException("Infeasible") } // find the min cost tie, and remove the associated knob val (knobId, newSoln) = tests min Ordering[Double].on[(_, Seq[(_, _, Estimate)])] { case (_, plans) => geoMeanScaledCost(plans.map(_._3)) } println("turning off largest knob: x%d (%f MB) (broke %d ties)".format( knobId, knobSize.toDouble / (1 << 20).toDouble, tests.size)) knobState -= knobId solnSoFar = newSoln } // not reached throw new RuntimeException("not reached") } def optimize( defns: Definitions, stats: Statistics, queries0: Seq[ Seq[(PlanNode, EstimateContext, Estimate)] ]): Seq[PlanNode] = { val queries = queries0.map(_.sortBy(_._3.cost)) //if (queries.filter(_.size > 1).isEmpty) { // // simple case // return queries.map(_.head._1) //} // use linear program for now val progInstance = formulateIntegerProgram(defns, stats, queries) val bilp = progInstance.prog.reduceToBILP val (obj, soln) = bilp.solve().getOrElse(throw new RuntimeException("No solution brah!")) val optSoln = new ArrayBuffer[(PlanNode, EstimateContext)] queries.foldLeft(0) { case (base, plans) => val interest = soln.slice(base, base + plans.size) val cands = interest.zipWithIndex.filter(_._1) assert(cands.size == 1) // assert exactly one solution val solnIdx = cands.head._2 val (p, ectx, _) = plans(solnIdx) optSoln += ((p, ectx)) // sanity check that the required x variables have been set (ectx.requiredOnions.toSeq ++ ectx.precomputed.toSeq).foreach { case (reln, cols) => val m = progInstance.globalXAssignReg.get(reln).getOrElse(Map.empty) cols.foreach { case (name, onions) => Onions.toSeq(onions).foreach { o => m.get((name, o)).foreach { pos => if (!soln(pos)) { println( "ERROR: solution does not contain required x-variable id=(%d), opt=(%s, %s, %s)".format( pos, reln, name, Onions.str(o))) } } } } } ectx.homGroups.foreach { case (reln, gids) => val m = progInstance.globalXAssignHom.get(reln).getOrElse(Map.empty) gids.foreach { gid => 
m.get(gid).foreach { pos => if (!soln(pos)) { println( "ERROR: solution does not contain required x-variable id=(%d), homGroup=(%s, %s)".format( pos, reln, gid)) } } } } (base + plans.size) } val simpleSoln = queries.map(_.head._1) // b/c queries is already sorted by simpleSoln assert(optSoln.size == simpleSoln.size) // sanity // useful debugging reporting println("LP objective function value: " + obj) // check storage constraint bilp.ieq_constraint.foreach { case (mat, vec) => val Ax = mat(0).zip(soln).foldLeft(0.0) { case (acc, (elem, b)) => if (b) (acc + elem) else acc } println("Ax_storage=%f, bx_storage=%f".format(Ax, vec(0))) } // flags enabled (excluding queries) val offset = queries.foldLeft(0) { case (acc, x) => acc + x.size } val xEnabled = (0 until soln.size).filter(soln(_)).map(x => "x%d".format(x)).mkString("[", ", ", "]") println("xEnabled: " + xEnabled) outputDebugSolnInfo(defns, stats, simpleSoln, optSoln) optSoln.map(_._1).toSeq } private def toImm(hm: HashMap[String, HashMap[String, Int]]): Map[String, Map[String, Int]] = { hm.map { case (k, v) => (k, v.toMap) }.toMap } private case class GlobalOnions( regular: Map[String, Map[String, Int]], precomputed: Map[String, Map[String, Int]]) private def extraCostsFromEstimate( global: GlobalOnions, ctx: EstimateContext, est: Estimate): PlanExtraCosts = { // we make a simplification and say the extra costs are only from // doing sequential scans val reg = new HashMap[String, HashMap[String, Int]] val pre = new HashMap[String, HashMap[String, Int]] def mergeDiff( dest: HashMap[String, HashMap[String, Int]], src: Map[String, Map[String, Int]], global: Map[String, Map[String, Int]]) = { src.foreach { case (k, v) => val gm = global(k) v.foreach { case (colname, onion) => assert((gm(colname) & onion) == onion) // assert the global contains this instance if (gm(colname) != onion) { // need to place the difference in the dest map val dm = dest.getOrElseUpdate(k, new HashMap[String, Int]) assert(!dm.contains(colname)) dm.put(colname, (gm(colname) & (~onion))) } } } } val seqScanTbls = est.seqScanInfo.map(_._1).toSet mergeDiff( reg, ctx.requiredOnions.filter(t => seqScanTbls.contains(t._1)), global.regular) mergeDiff( pre, ctx.precomputed.filter(t => seqScanTbls.contains(t._1)), global.precomputed) PlanExtraCosts( reg.map { case (k, v) => (k, v.toMap) }.toMap, pre.map { case (k, v) => (k, v.toMap) }.toMap) } case class ProgramInstance( prog: BIQPInstance, globalXAssignReg: Map[String, Map[(String, Int), Int]], globalXAssignHom: Map[String, Map[Int, Int]]) // give as input: // seq ( seq( (rewritten plan, est ctx, cost estimate for plan) ) ) def formulateIntegerProgram( defns: Definitions, stats: Statistics, queries: Seq[ Seq[(PlanNode, EstimateContext, Estimate)] ]): ProgramInstance = { // assumes queries is not empty val globalOpts = queries.head.head._2.globalOpts val reg = new HashMap[String, HashMap[String, Int]] val pre = new HashMap[String, HashMap[String, Int]] queries.zipWithIndex.foreach { case (qplans, qidx) => qplans.zipWithIndex.foreach { case ((_, ctx, _), pidx) => //println("q%d_p%d requiredOnions:".format(qidx, pidx) + ctx.requiredOnions) mergeInto(reg, ctx.requiredOnions) mergeInto(pre, ctx.precomputed) } } val globalOnions = GlobalOnions(toImm(reg), toImm(pre)) val extraCosts = queries.map(_.map { case (_, ec, e) => extraCostsFromEstimate(globalOnions, ec, e) }) var _ctr = 0 def nextPos(): Int = { val x = _ctr _ctr += 1 x } def numVariables: Int = _ctr // have the query plan selection flags come first // THIS MUST COME 
FIRST, b/c the callers assume this (and the order of // queries MUST be preserved) queries.foreach(_.foreach(_ => nextPos())) // maps (reln, col, onion) => position in the X vector val globalXAssignReg = new HashMap[String, HashMap[(String, Int), Int]] // maps (reln, group) => position in the X vector val globalXAssignHom = new HashMap[String, HashMap[Int, Int]] // regular columns defns.defns.foreach { case (tbl, cols) => cols.foreach { case TableColumn(name, _, _) => reg.get(tbl).flatMap(_.get(name)).foreach { o => val os = Onions.toSeq(o) // if there's only one choice, we're gonna pick it regardless, // so it's pointless to add it as a variable if (os.size > 1) { os.foreach { oo => assert(Onions.isSingleRowEncOnion(oo)) globalXAssignReg.getOrElseUpdate(tbl, HashMap.empty).put((name, oo), nextPos()) } } } } } // pre-computed columns pre.foreach { case (reln, cols) => println("relation " + reln + " precomputed expressions:") cols.foreach { case (name, os) => println(" %s: %s".format(name, globalOpts.precomputed(reln)(name))) Onions.toSeq(os).foreach { o => assert(Onions.isSingleRowEncOnion(o)) globalXAssignReg.getOrElseUpdate(reln, HashMap.empty).put((name, o), nextPos()) } } } // hom agg groups globalOpts.homGroups.foreach { case (reln, groups) => // sanity check assert( groups.toSet.size == groups.size ) println("relation " + reln + " hom groups:") (0 until groups.size).foreach { g => println(" %d: %s".format(g, groups(g).map(_.sql).mkString("{", ", ", "}"))) globalXAssignHom.getOrElseUpdate(reln, HashMap.empty).put(g, nextPos()) } } // build cost function in normal form: // 0.5 x' Q x + c x // note that Q_{ij} = 2H_{ij}, where H_{ij} is the coefficent you want // in front of x_{i}x_{j} in the final equation // // specifically in this formulation, we'll let x_{i} be the decision variable // governing whether or not to pick the query plan, and let x_{j} be the // decision variable governing whether or not a column optimization is enabled. 
// H_{ij} = (number of times table(column j) is scanned in plan i) * (cost to scan column j) // // for query plan i, we pick only the x_{j} such that the j-th onion is not // actually required for the plan to function, but exists in a table which is // scanned in the plan // // the fixed cost for a plan (which includes only the required onion // (assumes the plan is running independently)) is captured in the c vector // c_{i} is the fixed cost for the i-th query plan def mkNewMatrix(m: Int, n: Int): MutMatrix[Double] = MutSeq.fill(m, n)(0.0) def mkNewSqMatrix(n: Int): MutMatrix[Double] = mkNewMatrix(n, n) def mkNewVector(n: Int): MutVector[Double] = MutSeq.fill(n)(0.0) val Q_arg = mkNewSqMatrix(numVariables) val c_arg = mkNewVector(numVariables) // 0: storage constraint // 1...: number of total plans val nTotalPlans = queries.foldLeft(0) { case (acc, plans) => acc + plans.size } val A_arg = mkNewMatrix(1 + nTotalPlans, numVariables) val b_arg = mkNewVector(1 + nTotalPlans) val Aeq_arg = mkNewMatrix(queries.size, numVariables) val beq_arg = mkNewVector(queries.size) queries.zipWithIndex.foldLeft(0) { case ( acc, ( plans, qidx ) ) => plans.zipWithIndex.foreach { case ( ( nodes, ectx, est ), idx ) => val pidx = acc + idx c_arg(pidx) = est.cost // fixed cost // for each table we do sequential scan on est.seqScanInfo.foreach { case (tbl, nscans) => // s := total_onions(tbl) - used_onions(tbl) // add each element in s to Q (scaled by nscans) val gTable: HashMap[(String, Int), Int] = globalXAssignReg.getOrElse(tbl, HashMap.empty) val sTable = stats.stats(tbl) // total_onions comes from reg+pre def proc(m: HashMap[String, HashMap[String, Int]], b: Map[String, Map[String, Int]]) = { m.get(tbl).foreach { m => b.get(tbl).foreach { b => def procOnions(k: String, os: Seq[Int]) = { os.foreach { o => gTable.get((k, o)).foreach { globalId => // TODO: scale accordingly val recordSize = 4.0 val cost = CostConstants.secToPGUnit( nscans * (recordSize * sTable.row_count.toDouble / CostConstants.DiskReadBytesPerSec)) Q_arg(pidx)(globalId) = 2.0 * cost } } } m.foreach { case (k, v) if !b.contains(k) => procOnions(k, Onions.toSeq(v)) case (k, v) => procOnions(k, (Onions.toSeq(v).toSet -- Onions.toSeq(b(k)).toSet).toSeq) } } } } proc(reg, ectx.requiredOnions) proc(pre, ectx.precomputed) } // constraints! // each query plan has all the necessary onions + precomp onions it needs // (we add hom group constraints separately, below) // // we formulate as follows: suppose plan i requires onions q = [x1, x2, ...] // each plan i contributes the following inequality constraint: // \\sum_{q} x_{q} - |q|x_{i} \\geq 0 // this constraint says that if we pick x_{i}, then we must have all [x1, x2, ...] \\in q // to be enabled // TODO: we really want an index which maps a plan to all the x's used (ectx.requiredOnions.toSeq ++ ectx.precomputed.toSeq).foreach { case (reln, m) => val gTable: HashMap[(String, Int), Int] = globalXAssignReg.getOrElse(reln, HashMap.empty) m.foreach { case (col, o) => Onions.toSeq(o).foreach { o0 => gTable.get((col, o0)).foreach { gid => A_arg(pidx + 1)(gid) -= 1.0 // onion A_arg(pidx + 1)(pidx) += 1.0 // query } } } } ectx.homGroups.foreach { case (reln, gids) => val gHomTable: HashMap[Int, Int] = globalXAssignHom.getOrElse(reln, HashMap.empty) gids.foreach { g => val globalId = gHomTable(g) A_arg(pidx + 1)(globalId) -= 1.0 A_arg(pidx + 1)(pidx) += 1.0 } } } // each query picks exactly 1 plan // (equality constraints) // // this formulation is simple: // suppose query 1 has plans k = [p1, p2, ...] 
// each query contributes: // \\sum_{k} x_{k} = 1 // this says for each query we must pick exactly one execution option. really, the equality // is not necessary (is sufficient to say \\geq 1), but b/c we are computing min cost, // the min cost option must always be to pick exactly one execution plan (0 until plans.size).foreach { idx => val pidx = acc + idx Aeq_arg(qidx)(pidx) = 1.0 } beq_arg(qidx) = 1.0 (acc + plans.size) } // storage constraint: // the storage constraint is a linear function of x: // the constraint can be expressed as Ax <= b, where b is the constant // which says don't exceed this space var origSpace = 0.0 // regular defns.defns.foreach { case (tbl, cols) => val nRowsTable = stats.stats(tbl).row_count val gTable: HashMap[(String, Int), Int] = globalXAssignReg.getOrElse(tbl, HashMap.empty) cols.foreach { case TableColumn(name, tpe, _) => val regColSize = tpe.size b_arg(0) += SpaceFactor * nRowsTable * regColSize origSpace += nRowsTable * regColSize reg.get(tbl).flatMap(_.get(name)) match { case Some(o) => val os = Onions.toSeq(o) if (os.size > 1) { os.foreach { o0 => A_arg(0)(gTable((name, o0))) = nRowsTable * encColSize(regColSize, o0) } } else { // if there's only one choice, then we don't create an x variable // for the choice. however, if the onion is different size, we need // to factor that into the space calculation b_arg(0) -= nRowsTable * encColSize(regColSize, os.head) } case None => // no queries ever touch this column, so it will exist in DET form b_arg(0) -= nRowsTable * regColSize } } } // precomputed pre.foreach { case (tbl, cols) => val nRowsTable = stats.stats(tbl).row_count val gTable: HashMap[(String, Int), Int] = globalXAssignReg.getOrElse(tbl, HashMap.empty) cols.foreach { case (name, onions) => // lookup precomp type Onions.toSeq(onions).foreach { o => // TODO: actually propagate type information for // precomputed values- right now we just assume they are all // 4 bytes A_arg(0)(gTable((name, o))) = nRowsTable * encColSize(4, o) } } } // hom agg globalXAssignHom.foreach { case (tbl, grps) => val nRowsTable = stats.stats(tbl).row_count println("table: " + tbl) grps.toSeq.sortBy(_._1).foreach { case (gid, idx) => A_arg(0)(idx) = homAggSize(globalOpts, tbl, gid, nRowsTable).toDouble } } // assert we got every onion (nTotalPlans until A_arg(0).size).foreach { x => if (x == 0.0) { println("ERROR: x%d = 0".format(x)) } assert( x != 0.0 ) } println("x vector assignments") queries.zipWithIndex.foldLeft(0) { case (acc, (plans, idx)) => // inclusive println("q%d: x%d-x%d".format(idx, acc, acc + plans.size - 1)) acc + plans.size } globalXAssignReg.flatMap { case (reln, m) => m.map { case ((col, onion), idx) => (idx, reln, col, onion) } }.toSeq.sortBy(_._1).map { case (idx, reln, col, onion) => println("x%d: %s:%s:%d".format(idx, reln, col, onion)) } globalXAssignHom.flatMap { case (reln, m) => m.map { case (gid, idx) => (idx, reln, gid) } }.toSeq.sortBy(_._1).map { case (idx, reln, gid) => println("x%d: %s:%d".format(idx, reln, gid)) } // Do not allow any objective value to exceed 5 orders of magnitude of the // smallest value, by capping values larger val smallest = c_arg.filterNot(_ == 0.0).min val maxLimit = smallest * 100000.0 var nCapped = 0 val c_arg_capped = c_arg.map { x => if (x > maxLimit) { nCapped +=1; maxLimit } else x } if (nCapped > 0) { println("WARNING: had to cap %d values to %f".format(nCapped, maxLimit)) } // apply scaling println("Orig Space=(%f MB)".format( origSpace / (1 << 20).toDouble )) val c_arg_scaled = c_arg_capped.map(_ / 
ObjectiveFunctionScaleFactor) A_arg(0) = A_arg(0).map(_ / SpaceConstraintScaleFactor) b_arg(0) = b_arg(0) / SpaceConstraintScaleFactor ProgramInstance( BIQPInstance(toImmMatrix(Q_arg), toImmVector(c_arg_scaled), Some((toImmMatrix(A_arg), toImmVector(b_arg))), Some((toImmMatrix(Aeq_arg), toImmVector(beq_arg)))), globalXAssignReg.map { case (k, v) => (k, v.toSeq.toMap) }.toMap, globalXAssignHom.map { case (k, v) => (k, v.toSeq.toMap) }.toMap) } }
tristartom/monomi-optimizer
src/main/scala/formulator.scala
Scala
mit
37,781
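To make the encoding concrete, here is a toy BILPInstance built against the definitions above: two queries with two candidate plans each, one "pick exactly one plan per query" equality row per query, and a single storage-budget inequality row. All coefficients are made up, and since solve() shells out to glpsol/lp_solve, the sketch only prints the CPLEX-format text; it is assumed to compile in the same package as the code above (where Timer lives).

// Toy instance: x0,x1 = plans for query 0; x2,x3 = plans for query 1.
object BilpSketch {
  def main(args: Array[String]): Unit = {
    val c = Vector(3.0, 5.0, 2.0, 7.0)             // fixed cost of each plan
    val ieq = (Vector(Vector(1.0, 2.0, 1.0, 4.0)), // storage used per plan...
               Vector(5.0))                        // ...must total <= 5
    val eq = (Vector(Vector(1.0, 1.0, 0.0, 0.0),   // exactly one plan for q0
                     Vector(0.0, 0.0, 1.0, 1.0)),  // exactly one plan for q1
              Vector(1.0, 1.0))

    val prog = BILPInstance(c, Some(ieq), Some(eq))
    println(prog.toCPLEXInput) // feed to `glpsol --lp <file>` to actually solve
  }
}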
package io.cumulus.utils import scala.util.Try import akka.util.ByteString import com.google.common.io.BaseEncoding object Base16 { /** * Encode in base 16 the provided bytes. * * @param bytes The bytes to encode. * @return The encoded bytes. */ def encode(bytes: ByteString): String = BaseEncoding.base16.encode(bytes.toArray) /** * Decode the provided base 16 encoded string. If the string can't be decoded, `None` will be returned. * * @param encoded The encoded string. * @return The decoded string as bytes, or nothing. */ def decode(encoded: String): Option[ByteString] = Try { Some(ByteString(BaseEncoding.base16.decode(encoded.toCharArray))) } getOrElse None } object Base64 { /** * Encode in base 64 the provided bytes. * * @param bytes The bytes to encode. * @return The encoded bytes. */ def encode(bytes: ByteString): String = encode(bytes.toArray) /** * Encode in base 64 the provided byte array. * * @param bytes The bytes to encode. * @return The encoded bytes. */ def encode(bytes: Array[Byte]): String = ByteString(java.util.Base64.getEncoder.encode(bytes)).utf8String /** * Decode the provided base 64 encoded string. If the string can't be decoded, `None` will be returned. * * @param encoded The encoded string. * @return The decoded string as bytes, or nothing. */ def decode(encoded: String): Option[ByteString] = Try { Some(ByteString(java.util.Base64.getDecoder.decode(encoded.getBytes("UTF-8")))) } getOrElse None }
Cumulus-Cloud/cumulus
server/cumulus-core/src/main/scala/io/cumulus/utils/Base64.scala
Scala
mit
1,613
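A round-trip usage sketch for the two codecs above; the printed values follow from Guava's uppercase hex alphabet and standard base64.

import akka.util.ByteString
import io.cumulus.utils.{Base16, Base64}

object EncodingSketch {
  def main(args: Array[String]): Unit = {
    val payload = ByteString("hello")

    println(Base16.encode(payload)) // 68656C6C6F
    println(Base64.encode(payload)) // aGVsbG8=

    // Decoding is total: malformed input comes back as None, not an exception.
    assert(Base16.decode("68656C6C6F").contains(payload))
    assert(Base64.decode("aGVsbG8=").contains(payload))
    assert(Base64.decode("not base64!").isEmpty)
  }
}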
/** * Copyright 2014 André Rouél * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.before.uadetector import com.github.before.uadetector.datasource.Device case class UserAgent( deviceCategory: Option[DeviceClass], iconName: String, name: String, operatingSystem: Option[OperatingSystem], producer: String, producerUrl: String, uaType: Option[UserAgentType], url: String, version: Option[Version])
before/uadetector-scala
src/main/scala/com/github/before/uadetector/UserAgent.scala
Scala
apache-2.0
967
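A minimal construction sketch for the case class above. The optional fields are left as None because their types (DeviceClass, OperatingSystem, UserAgentType, Version) live elsewhere in the package, and the Firefox values are illustrative.

import com.github.before.uadetector.UserAgent

object UserAgentSketch {
  val firefox = UserAgent(
    deviceCategory = None,
    iconName = "firefox.png",
    name = "Firefox",
    operatingSystem = None,
    producer = "Mozilla Foundation",
    producerUrl = "http://www.mozilla.org/",
    uaType = None,
    url = "http://www.firefox.com/",
    version = None)
}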
package apps

/** The `state` package contains example applications for state-oriented
 *  simulation models.
 */
package object state {
}
NBKlepp/fda
scalation_1.3/scalation_models/src/main/scala/apps/state/package.scala
Scala
mit
142
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.server import java.util import java.util.{Collections} import java.util.concurrent.locks.ReentrantReadWriteLock import scala.collection.{mutable, Seq, Set} import scala.jdk.CollectionConverters._ import kafka.cluster.{Broker, EndPoint} import kafka.api._ import kafka.controller.StateChangeLogger import kafka.utils.CoreUtils._ import kafka.utils.Logging import org.apache.kafka.common.internals.Topic import org.apache.kafka.common.message.UpdateMetadataRequestData.UpdateMetadataPartitionState import org.apache.kafka.common.{Cluster, Node, PartitionInfo, TopicPartition} import org.apache.kafka.common.message.MetadataResponseData.MetadataResponseTopic import org.apache.kafka.common.message.MetadataResponseData.MetadataResponsePartition import org.apache.kafka.common.network.ListenerName import org.apache.kafka.common.protocol.Errors import org.apache.kafka.common.requests.{MetadataResponse, UpdateMetadataRequest} import org.apache.kafka.common.security.auth.SecurityProtocol /** * A cache for the state (e.g., current leader) of each partition. This cache is updated through * UpdateMetadataRequest from the controller. Every broker maintains the same cache, asynchronously. */ class MetadataCache(brokerId: Int) extends Logging { private val partitionMetadataLock = new ReentrantReadWriteLock() //this is the cache state. every MetadataSnapshot instance is immutable, and updates (performed under a lock) //replace the value with a completely new one. this means reads (which are not under any lock) need to grab //the value of this var (into a val) ONCE and retain that read copy for the duration of their operation. //multiple reads of this value risk getting different snapshots. @volatile private var metadataSnapshot: MetadataSnapshot = MetadataSnapshot(partitionStates = mutable.AnyRefMap.empty, controllerId = None, aliveBrokers = mutable.LongMap.empty, aliveNodes = mutable.LongMap.empty) this.logIdent = s"[MetadataCache brokerId=$brokerId] " private val stateChangeLogger = new StateChangeLogger(brokerId, inControllerContext = false, None) // This method is the main hotspot when it comes to the performance of metadata requests, // we should be careful about adding additional logic here. Relatedly, `brokers` is // `List[Integer]` instead of `List[Int]` to avoid a collection copy. 
// filterUnavailableEndpoints exists to support v0 MetadataResponses private def maybeFilterAliveReplicas(snapshot: MetadataSnapshot, brokers: java.util.List[Integer], listenerName: ListenerName, filterUnavailableEndpoints: Boolean): java.util.List[Integer] = { if (!filterUnavailableEndpoints) { brokers } else { val res = new util.ArrayList[Integer](math.min(snapshot.aliveBrokers.size, brokers.size)) for (brokerId <- brokers.asScala) { if (hasAliveEndpoint(snapshot, brokerId, listenerName)) res.add(brokerId) } res } } // errorUnavailableEndpoints exists to support v0 MetadataResponses // If errorUnavailableListeners=true, return LISTENER_NOT_FOUND if listener is missing on the broker. // Otherwise, return LEADER_NOT_AVAILABLE for broker unavailable and missing listener (Metadata response v5 and below). private def getPartitionMetadata(snapshot: MetadataSnapshot, topic: String, listenerName: ListenerName, errorUnavailableEndpoints: Boolean, errorUnavailableListeners: Boolean): Option[Iterable[MetadataResponsePartition]] = { snapshot.partitionStates.get(topic).map { partitions => partitions.map { case (partitionId, partitionState) => val topicPartition = new TopicPartition(topic, partitionId.toInt) val leaderBrokerId = partitionState.leader val leaderEpoch = partitionState.leaderEpoch val maybeLeader = getAliveEndpoint(snapshot, leaderBrokerId, listenerName) val replicas = partitionState.replicas val filteredReplicas = maybeFilterAliveReplicas(snapshot, replicas, listenerName, errorUnavailableEndpoints) val isr = partitionState.isr val filteredIsr = maybeFilterAliveReplicas(snapshot, isr, listenerName, errorUnavailableEndpoints) val offlineReplicas = partitionState.offlineReplicas maybeLeader match { case None => val error = if (!snapshot.aliveBrokers.contains(brokerId)) { // we are already holding the read lock debug(s"Error while fetching metadata for $topicPartition: leader not available") Errors.LEADER_NOT_AVAILABLE } else { debug(s"Error while fetching metadata for $topicPartition: listener $listenerName " + s"not found on leader $leaderBrokerId") if (errorUnavailableListeners) Errors.LISTENER_NOT_FOUND else Errors.LEADER_NOT_AVAILABLE } new MetadataResponsePartition() .setErrorCode(error.code) .setPartitionIndex(partitionId.toInt) .setLeaderId(MetadataResponse.NO_LEADER_ID) .setLeaderEpoch(leaderEpoch) .setReplicaNodes(filteredReplicas) .setIsrNodes(filteredIsr) .setOfflineReplicas(offlineReplicas) case Some(leader) => val error = if (filteredReplicas.size < replicas.size) { debug(s"Error while fetching metadata for $topicPartition: replica information not available for " + s"following brokers ${replicas.asScala.filterNot(filteredReplicas.contains).mkString(",")}") Errors.REPLICA_NOT_AVAILABLE } else if (filteredIsr.size < isr.size) { debug(s"Error while fetching metadata for $topicPartition: in sync replica information not available for " + s"following brokers ${isr.asScala.filterNot(filteredIsr.contains).mkString(",")}") Errors.REPLICA_NOT_AVAILABLE } else { Errors.NONE } new MetadataResponsePartition() .setErrorCode(error.code) .setPartitionIndex(partitionId.toInt) .setLeaderId(maybeLeader.map(_.id()).getOrElse(MetadataResponse.NO_LEADER_ID)) .setLeaderEpoch(leaderEpoch) .setReplicaNodes(filteredReplicas) .setIsrNodes(filteredIsr) .setOfflineReplicas(offlineReplicas) } } } } /** * Check whether a broker is alive and has a registered listener matching the provided name. 
* This method was added to avoid unnecessary allocations in [[maybeFilterAliveReplicas]], which is * a hotspot in metadata handling. */ private def hasAliveEndpoint(snapshot: MetadataSnapshot, brokerId: Int, listenerName: ListenerName): Boolean = { snapshot.aliveNodes.get(brokerId).exists(_.contains(listenerName)) } /** * Get the endpoint matching the provided listener if the broker is alive. Note that listeners can * be added dynamically, so a broker with a missing listener could be a transient error. * * @return None if broker is not alive or if the broker does not have a listener named `listenerName`. */ private def getAliveEndpoint(snapshot: MetadataSnapshot, brokerId: Int, listenerName: ListenerName): Option[Node] = { snapshot.aliveNodes.get(brokerId).flatMap(_.get(listenerName)) } // errorUnavailableEndpoints exists to support v0 MetadataResponses def getTopicMetadata(topics: Set[String], listenerName: ListenerName, errorUnavailableEndpoints: Boolean = false, errorUnavailableListeners: Boolean = false): Seq[MetadataResponseTopic] = { val snapshot = metadataSnapshot topics.toSeq.flatMap { topic => getPartitionMetadata(snapshot, topic, listenerName, errorUnavailableEndpoints, errorUnavailableListeners).map { partitionMetadata => new MetadataResponseTopic() .setErrorCode(Errors.NONE.code) .setName(topic) .setIsInternal(Topic.isInternal(topic)) .setPartitions(partitionMetadata.toBuffer.asJava) } } } def getAllTopics(): Set[String] = { getAllTopics(metadataSnapshot) } def getAllPartitions(): Set[TopicPartition] = { metadataSnapshot.partitionStates.flatMap { case (topicName, partitionsAndStates) => partitionsAndStates.keys.map(partitionId => new TopicPartition(topicName, partitionId.toInt)) }.toSet } private def getAllTopics(snapshot: MetadataSnapshot): Set[String] = { snapshot.partitionStates.keySet } private def getAllPartitions(snapshot: MetadataSnapshot): Map[TopicPartition, UpdateMetadataPartitionState] = { snapshot.partitionStates.flatMap { case (topic, partitionStates) => partitionStates.map { case (partition, state ) => (new TopicPartition(topic, partition.toInt), state) } }.toMap } def getNonExistingTopics(topics: Set[String]): Set[String] = { topics.diff(metadataSnapshot.partitionStates.keySet) } def getAliveBroker(brokerId: Int): Option[Broker] = { metadataSnapshot.aliveBrokers.get(brokerId) } def getAliveBrokers: Seq[Broker] = { metadataSnapshot.aliveBrokers.values.toBuffer } private def addOrUpdatePartitionInfo(partitionStates: mutable.AnyRefMap[String, mutable.LongMap[UpdateMetadataPartitionState]], topic: String, partitionId: Int, stateInfo: UpdateMetadataPartitionState): Unit = { val infos = partitionStates.getOrElseUpdate(topic, mutable.LongMap()) infos(partitionId) = stateInfo } def getPartitionInfo(topic: String, partitionId: Int): Option[UpdateMetadataPartitionState] = { metadataSnapshot.partitionStates.get(topic).flatMap(_.get(partitionId)) } // if the leader is not known, return None; // if the leader is known and corresponding node is available, return Some(node) // if the leader is known but corresponding node with the listener name is not available, return Some(NO_NODE) def getPartitionLeaderEndpoint(topic: String, partitionId: Int, listenerName: ListenerName): Option[Node] = { val snapshot = metadataSnapshot snapshot.partitionStates.get(topic).flatMap(_.get(partitionId)) map { partitionInfo => val leaderId = partitionInfo.leader snapshot.aliveNodes.get(leaderId) match { case Some(nodeMap) => nodeMap.getOrElse(listenerName, Node.noNode) case None => Node.noNode } } 
} def getPartitionReplicaEndpoints(tp: TopicPartition, listenerName: ListenerName): Map[Int, Node] = { val snapshot = metadataSnapshot snapshot.partitionStates.get(tp.topic).flatMap(_.get(tp.partition)).map { partitionInfo => val replicaIds = partitionInfo.replicas replicaIds.asScala .map(replicaId => replicaId.intValue() -> { snapshot.aliveBrokers.get(replicaId.longValue()) match { case Some(broker) => broker.getNode(listenerName).getOrElse(Node.noNode()) case None => Node.noNode() }}).toMap .filter(pair => pair match { case (_, node) => !node.isEmpty }) }.getOrElse(Map.empty[Int, Node]) } def getControllerId: Option[Int] = metadataSnapshot.controllerId def getClusterMetadata(clusterId: String, listenerName: ListenerName): Cluster = { val snapshot = metadataSnapshot val nodes = snapshot.aliveNodes.map { case (id, nodes) => (id, nodes.get(listenerName).orNull) } def node(id: Integer): Node = nodes.get(id.toLong).orNull val partitions = getAllPartitions(snapshot) .filter { case (_, state) => state.leader != LeaderAndIsr.LeaderDuringDelete } .map { case (tp, state) => new PartitionInfo(tp.topic, tp.partition, node(state.leader), state.replicas.asScala.map(node).toArray, state.isr.asScala.map(node).toArray, state.offlineReplicas.asScala.map(node).toArray) } val unauthorizedTopics = Collections.emptySet[String] val internalTopics = getAllTopics(snapshot).filter(Topic.isInternal).asJava new Cluster(clusterId, nodes.values.filter(_ != null).toBuffer.asJava, partitions.toBuffer.asJava, unauthorizedTopics, internalTopics, snapshot.controllerId.map(id => node(id)).orNull) } // This method returns the deleted TopicPartitions received from UpdateMetadataRequest def updateMetadata(correlationId: Int, updateMetadataRequest: UpdateMetadataRequest): Seq[TopicPartition] = { inWriteLock(partitionMetadataLock) { val aliveBrokers = new mutable.LongMap[Broker](metadataSnapshot.aliveBrokers.size) val aliveNodes = new mutable.LongMap[collection.Map[ListenerName, Node]](metadataSnapshot.aliveNodes.size) val controllerIdOpt = updateMetadataRequest.controllerId match { case id if id < 0 => None case id => Some(id) } updateMetadataRequest.liveBrokers.forEach { broker => // `aliveNodes` is a hot path for metadata requests for large clusters, so we use java.util.HashMap which // is a bit faster than scala.collection.mutable.HashMap. When we drop support for Scala 2.10, we could // move to `AnyRefMap`, which has comparable performance. 
val nodes = new java.util.HashMap[ListenerName, Node] val endPoints = new mutable.ArrayBuffer[EndPoint] broker.endpoints.forEach { ep => val listenerName = new ListenerName(ep.listener) endPoints += new EndPoint(ep.host, ep.port, listenerName, SecurityProtocol.forId(ep.securityProtocol)) nodes.put(listenerName, new Node(broker.id, ep.host, ep.port)) } aliveBrokers(broker.id) = Broker(broker.id, endPoints, Option(broker.rack)) aliveNodes(broker.id) = nodes.asScala } aliveNodes.get(brokerId).foreach { listenerMap => val listeners = listenerMap.keySet if (!aliveNodes.values.forall(_.keySet == listeners)) error(s"Listeners are not identical across brokers: $aliveNodes") } val deletedPartitions = new mutable.ArrayBuffer[TopicPartition] if (!updateMetadataRequest.partitionStates.iterator.hasNext) { metadataSnapshot = MetadataSnapshot(metadataSnapshot.partitionStates, controllerIdOpt, aliveBrokers, aliveNodes) } else { //since kafka may do partial metadata updates, we start by copying the previous state val partitionStates = new mutable.AnyRefMap[String, mutable.LongMap[UpdateMetadataPartitionState]](metadataSnapshot.partitionStates.size) metadataSnapshot.partitionStates.foreach { case (topic, oldPartitionStates) => val copy = new mutable.LongMap[UpdateMetadataPartitionState](oldPartitionStates.size) copy ++= oldPartitionStates partitionStates += (topic -> copy) } val traceEnabled = stateChangeLogger.isTraceEnabled val controllerId = updateMetadataRequest.controllerId val controllerEpoch = updateMetadataRequest.controllerEpoch val newStates = updateMetadataRequest.partitionStates.asScala newStates.foreach { state => // per-partition logging here can be very expensive due going through all partitions in the cluster val tp = new TopicPartition(state.topicName, state.partitionIndex) if (state.leader == LeaderAndIsr.LeaderDuringDelete) { removePartitionInfo(partitionStates, tp.topic, tp.partition) if (traceEnabled) stateChangeLogger.trace(s"Deleted partition $tp from metadata cache in response to UpdateMetadata " + s"request sent by controller $controllerId epoch $controllerEpoch with correlation id $correlationId") deletedPartitions += tp } else { addOrUpdatePartitionInfo(partitionStates, tp.topic, tp.partition, state) if (traceEnabled) stateChangeLogger.trace(s"Cached leader info $state for partition $tp in response to " + s"UpdateMetadata request sent by controller $controllerId epoch $controllerEpoch with correlation id $correlationId") } } val cachedPartitionsCount = newStates.size - deletedPartitions.size stateChangeLogger.info(s"Add $cachedPartitionsCount partitions and deleted ${deletedPartitions.size} partitions from metadata cache " + s"in response to UpdateMetadata request sent by controller $controllerId epoch $controllerEpoch with correlation id $correlationId") metadataSnapshot = MetadataSnapshot(partitionStates, controllerIdOpt, aliveBrokers, aliveNodes) } deletedPartitions } } def contains(topic: String): Boolean = { metadataSnapshot.partitionStates.contains(topic) } def contains(tp: TopicPartition): Boolean = getPartitionInfo(tp.topic, tp.partition).isDefined private def removePartitionInfo(partitionStates: mutable.AnyRefMap[String, mutable.LongMap[UpdateMetadataPartitionState]], topic: String, partitionId: Int): Boolean = { partitionStates.get(topic).exists { infos => infos.remove(partitionId) if (infos.isEmpty) partitionStates.remove(topic) true } } case class MetadataSnapshot(partitionStates: mutable.AnyRefMap[String, mutable.LongMap[UpdateMetadataPartitionState]], controllerId: 
Option[Int], aliveBrokers: mutable.LongMap[Broker], aliveNodes: mutable.LongMap[collection.Map[ListenerName, Node]]) }
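// A hedged sketch of consuming the tri-state contract documented on
// getPartitionLeaderEndpoint above: None (leader unknown), Some(Node.noNode)
// (leader known but no endpoint for the listener), or a usable node. The
// object and method names here are illustrative, not part of this file.
object LeaderEndpointUsageSketch {
  import org.apache.kafka.common.Node

  def describe(endpoint: Option[Node]): String = endpoint match {
    case None                 => "leader not known"
    case Some(n) if n.isEmpty => "leader known, but listener endpoint unavailable"
    case Some(n)              => s"leader reachable at ${n.host}:${n.port}"
  }
}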
sslavic/kafka
core/src/main/scala/kafka/server/MetadataCache.scala
Scala
apache-2.0
18,603
package com.artclod.mathml.scalar.apply

import com.artclod.mathml._
import com.artclod.mathml.scalar._
import com.artclod.mathml.scalar.concept.Constant

import scala.util._

case class ApplyDivide(val numerator: MathMLElem, val denominator: MathMLElem)
  extends MathMLElem(MathML.h.prefix, "apply", MathML.h.attributes, MathML.h.scope, false, (Seq[MathMLElem](Divide) ++ numerator ++ denominator): _*) {

  def eval(vars: Map[String, Double]): Try[Double] = (numerator.eval(vars), denominator.eval(vars)) match {
    case (f: Failure[Double], _) => f
    case (_, f: Failure[Double]) => f
    case (Success(n), Success(d)) => {
      val divide = n / d
      // A non-zero numerator should not produce a zero quotient; if it does,
      // the division collapsed numerically (e.g. an infinite denominator or
      // underflow), so fail rather than return a misleading 0.
      if (divide == 0d && n != 0d) {
        Failure(new IllegalStateException(s"$n / $d collapsed to zero for a non-zero numerator"))
      } else {
        Success(divide)
      }
    }
  }

  def constant: Option[Constant] = (numerator.c, denominator.c) match {
    case (Some(nu), Some(de)) => Some(nu / de)
    case (Some(nu), _) => if (nu.isZero) Some(nu) else None
    case _ => None
  }

  def simplifyStep() =
    if (denominator.isOne) numerator.s
    else if (numerator.isZero && !denominator.isZero) `0`
    else {
      (numerator.s, denominator.s) match {
        case (ApplyDivide(n, d), o) => n / (d * o)
        case (o, ApplyDivide(n, d)) => (o * d) / n
        case (n, d) => ApplyDivide(n, d)
      }
    }

  def variables: Set[String] = numerator.variables ++ denominator.variables

  // Quotient Rule (http://en.wikipedia.org/wiki/Quotient_rule)
  def derivative(x: String): MathMLElem = {
    val f = numerator.s
    val fP = f.d(x)
    val g = denominator.s
    val gP = g.d(x)
    // (f/g)' = (f'g - g'f)/g^2
    (fP * g - gP * f) / (g ^ `2`)
  }

  override def toMathJS: String = "(" + numerator.toMathJS + " / " + denominator.toMathJS + ")"
}
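// A self-contained numeric sanity check of the quotient rule used in
// `derivative` above, with plain doubles instead of MathMLElem (all helper
// names are illustrative only): (f/g)' = (f'g - g'f) / g^2.
object QuotientRuleSketch {
  def main(args: Array[String]): Unit = {
    val f  = (x: Double) => x * x   // f(x) = x^2
    val fP = (x: Double) => 2 * x   // f'(x) = 2x
    val g  = (x: Double) => x + 1.0 // g(x) = x + 1
    val gP = (_: Double) => 1.0     // g'(x) = 1

    val x = 3.0
    val analytic = (fP(x) * g(x) - gP(x) * f(x)) / (g(x) * g(x))
    val h = 1e-6
    val numeric = (f(x + h) / g(x + h) - f(x) / g(x)) / h
    // The finite-difference estimate should agree to roughly 4 decimal places.
    assert(math.abs(analytic - numeric) < 1e-4)
  }
}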
kristiankime/web-education-games
app/com/artclod/mathml/scalar/apply/ApplyDivide.scala
Scala
mit
1,693
object Test { def main(args: Array[String]) { val y: AA[Int] = C(2) val c: Int = y.x.y assert(c == 2) } } trait AA[T] extends Any { def x: C[T] } case class C[T](val y: T) extends AnyVal with AA[T] { def x = this }
felixmulder/scala
test/files/run/t6385.scala
Scala
bsd-3-clause
245
package io.finch.refined import eu.timepit.refined.api.Refined import eu.timepit.refined.cats._ import eu.timepit.refined.collection.NonEmpty import eu.timepit.refined.numeric.Positive import eu.timepit.refined.scalacheck.all._ import io.finch.{DecodeEntityLaws, FinchSpec} class DecodeEntityRefinedSpec extends FinchSpec { checkAll("DecodeEntity[Int Refined Positive]", DecodeEntityLaws[Int Refined Positive].all) checkAll("DecodeEntity[String Refined NonEmpty]", DecodeEntityLaws[String Refined NonEmpty].all) }
finagle/finch
refined/src/test/scala/io/finch/refined/DecodeEntityRefinedSpec.scala
Scala
apache-2.0
522
/* * SimpleGUI.scala * (FScape) * * Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved. * * This software is published under the GNU Affero General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * [email protected] */ package de.sciss.fscape.gui import de.sciss.fscape.stream.{Cancelled, Control} import de.sciss.numbers import scala.concurrent.ExecutionContext import scala.swing.{BorderPanel, Button, FlowPanel, Frame, Label, ProgressBar, Swing} import scala.util.{Failure, Success} object SimpleGUI { def apply(ctrl: Control): SimpleGUI = { val txtCancelled = "Cancelled." val lbCancelled = new Label(txtCancelled) lbCancelled.preferredSize = { val d = lbCancelled.preferredSize // d.width = (d.width * 1.5).toInt d.width *= 2 d } lbCancelled.text = null var finished = false import ExecutionContext.Implicits.global val ggCancel = Button("Cancel")(ctrl.cancel()) val ggDump = Button("Dump") { ctrl.debugDotGraph() ctrl.stats.foreach(println) } val ggProg = new ProgressBar ggProg.max = 250 ctrl.status.onComplete { tr => Swing.onEDT { finished = true ggCancel.enabled = false lbCancelled.text = tr match { case Success(()) => "Done." case Failure(Cancelled()) => txtCancelled case Failure(ex) => ex.printStackTrace() s"Error: ${ex.getMessage}" } lbCancelled.tooltip = lbCancelled.text } } val f = new Frame { title = "Control" contents = new BorderPanel { add(new FlowPanel(ggCancel, ggDump, lbCancelled), BorderPanel.Position.Center) add(ggProg, BorderPanel.Position.South) } pack().centerOnScreen() open() override def closeOperation(): Unit = { if (finished) sys.exit() } } new Impl(f, ggProg) } private final class Impl(val frame: Frame, ggProg: ProgressBar) extends SimpleGUI { private[this] var _prog = 0.0 def progress: Double = _prog def progress_=(value: Double): Unit = if (_prog != value) { _prog = value import numbers.Implicits._ ggProg.value = value.linLin(0, 1, 0, 250).toInt } } } trait SimpleGUI { def frame: Frame var progress: Double }
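// Sketch of the mapping performed by `progress_=` above: `linLin` (from the
// numbers library) rescales the unit interval onto the bar's 0..250 range.
// A plain-Scala equivalent, for illustration only:
object LinLinSketch {
  def linLin(in: Double, inLo: Double, inHi: Double, outLo: Double, outHi: Double): Double =
    (in - inLo) / (inHi - inLo) * (outHi - outLo) + outLo

  // linLin(0.5, 0, 1, 0, 250) == 125.0, matching ggProg.value at 50% progress
}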
Sciss/FScape-next
core/jvm/src/main/scala/de/sciss/fscape/gui/SimpleGUI.scala
Scala
agpl-3.0
2,397
import scala.concurrent._ import scala.util._ import ExecutionContext.Implicits.global object Main extends App { def getData1() = { for (i <- 1 to 10) { Thread.sleep(100); print("1...") } val result = Random.nextInt(10) println(result) result } def getData2() = { for (i <- 1 to 10) { Thread.sleep(100); print("2...") } val result = 10 * Random.nextInt(10) println(result) result } def workHard(arg: String) = { println("Working hard") Thread.sleep(1000) arg.length } def workOnSomethingElse() = { Thread.sleep(1000) println("Working on something else") Thread.sleep(1000) 0 } def workSmart(arg: String) = { println("Working smart") Thread.sleep(100) 42 } def computeAnswer(arg: String) = { val p = Promise[Int]() Future { val n = workHard(arg) p.success(n) workOnSomethingElse() } p.future } val f = computeAnswer("Fred") f.foreach(n => println(s"Result: $n")) Thread.sleep(3000) val p1 = Promise[Int]() val p2 = Promise[Int]() Future { val n1 = getData1() p1.success(n1) val n2 = getData2() p2.success(n2) } p1.future.foreach(n => println(s"Result 1: $n")) p2.future.foreach(n => println(s"Result 2: $n")) Thread.sleep(3000) val p = Promise[Int]() val arg = "Fred" Future { var n = workHard(arg) p.trySuccess(n) } Future { var n = workSmart(arg) p.trySuccess(n) } p.future.foreach(n => println(s"Result: $n")) Thread.sleep(3000) }
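// The trySuccess race in the final block above (workHard vs. workSmart, first
// completed promise wins) can also be written with the standard library.
// Sketch only; it reuses the file's global ExecutionContext import:
object FirstCompletedSketch {
  def fastest(arg: String): Future[Int] =
    Future.firstCompletedOf(Seq(Future(Main.workHard(arg)), Future(Main.workSmart(arg))))
}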
yeahnoob/scala-impatient-2e-code
src/ch17/sec08/sec07/Promises.scala
Scala
gpl-3.0
1,578
package fpinscala.iomonad /** * A trampolined computation producing an `A` that may include * asynchronous steps. See constructors in companion object. */ trait Future[+A] { import Future._ /* Exercise 11: Implement `Monad[Future]`. We will need it to implement our nonblocking `IO` interpreter. Also implement `runAsync`, for an asynchronous evaluator for `Future`, and `run`, the synchronous evaluator. */ def flatMap[B](f: A => Future[B]): Future[B] = this match { case Now(a) => f(a) case More(thunk) => BindMore(thunk, f) case Async(listen) => BindAsync(listen, f) case BindMore(thunk, g) => More(() => BindMore(thunk, g andThen (_ flatMap f))) case BindAsync(listen, g) => More(() => BindAsync(listen, g andThen (_ flatMap f))) } def map[B](f: A => B): Future[B] = flatMap(f andThen (b => Future.unit(b))) def listen(cb: A => Trampoline[Unit]): Unit = this.step match { case Now(a) => cb(a) case Async(onFinish) => onFinish(cb) case BindAsync(onFinish, g) => onFinish(x => Trampoline.delay(g(x)) map (_ listen cb)) } @annotation.tailrec final def step: Future[A] = this match { case More(thunk) => thunk().step case BindMore(thunk, f) => (thunk() flatMap f).step case _ => this } def start: Future[A] = { val latch = new java.util.concurrent.CountDownLatch(1) @volatile var result: Option[A] = None runAsync { a => result = Some(a); latch.countDown } delay { latch.await; result.get } } def runAsync(cb: A => Unit): Unit = listen(a => Trampoline.done(cb(a))) def run: A = { val latch = new java.util.concurrent.CountDownLatch(1) @volatile var result: Option[A] = None runAsync { a => result = Some(a); latch.countDown } latch.await result.get } } object Future extends Monad[Future] { case class Now[+A](a: A) extends Future[A] case class Async[+A](onFinish: (A => Trampoline[Unit]) => Unit) extends Future[A] case class More[+A](thunk: () => Future[A]) extends Future[A] case class BindMore[A,B](thunk: () => Future[A], f: A => Future[B]) extends Future[B] case class BindAsync[A,B](onFinish: (A => Trampoline[Unit]) => Unit, f: A => Future[B]) extends Future[B] def unit[A](a: => A): Future[A] = apply(a) // more { Now(a) } def now[A](a: A): Future[A] = Now(a) def delay[A](a: => A): Future[A] = More(() => Now(a)) def fork[A](a: => Future[A]): Future[A] = apply(a) flatMap (a => a) def async[A](listen: (A => Unit) => Unit): Future[A] = Async((cb: A => Trampoline[Unit]) => listen { a => cb(a).run }) def more[A](f: => Future[A]): Future[A] = More(() => f) def flatMap[A,B](fa: Future[A])(f: A => Future[B]): Future[B] = fa flatMap f import java.util.concurrent.{Callable, Executors, ThreadFactory} def apply[A](a: => A): Future[A] = Async { cb => pool.submit { new Callable[Unit] { def call = cb(a).run }} } // Daemon threads will not prevent the JVM from exiting, if they are // the only threads left running (see java.lang.Thread API docs for // details) val daemonize = new ThreadFactory { def newThread(r: Runnable) = { val t = new Thread(r) t.setDaemon(true) t }} //val pool = Executors.newCachedThreadPool(daemonize) val pool = Executors.newFixedThreadPool(1, daemonize) /* Exercise 13: We can also use an API that supports nonblocking operations directly. Left as an exercise; the general idea is to use the I/O API to construct a Later directly, rather that using Future.apply, which delegates to a thread pool. */ /* * We'll just give the basic idea - here, we construct a `Future` * by reading from an `AsynchronousFileChannel`, a `java.nio` class * which supports asynchronous reads. 
 */
  import java.nio._
  import java.nio.channels._

  def read(file: AsynchronousFileChannel,
           fromPosition: Long,
           nBytes: Int): Future[Either[Throwable, Array[Byte]]] = {
    val buf = ByteBuffer.allocate(nBytes)
    Async { (cb: Either[Throwable,Array[Byte]] => Trampoline[Unit]) =>
      file.read(buf, fromPosition, (), new CompletionHandler[Integer, Unit] {
        def completed(bytesRead: Integer, ignore: Unit) = {
          val arr = new Array[Byte](bytesRead)
          // The read advanced the buffer's position, so flip before copying
          // out; `buf.slice.get(...)` would start past the written bytes and
          // can underflow once more than half the buffer is filled.
          buf.flip()
          buf.get(arr, 0, bytesRead)
          cb(Right(arr)).run
        }
        def failed(err: Throwable, ignore: Unit) =
          cb(Left(err)).run
      })
    }
  }
}
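// A hedged usage sketch for `read` above; the path is made up, and `run`
// blocks the calling thread until the completion handler fires:
object ReadSketch {
  import java.nio.channels.AsynchronousFileChannel
  import java.nio.file.{Paths, StandardOpenOption}

  def main(args: Array[String]): Unit = {
    val ch = AsynchronousFileChannel.open(Paths.get("/tmp/data.bin"), StandardOpenOption.READ)
    val result = Future.read(ch, fromPosition = 0L, nBytes = 1024).run
    result.fold(
      err => println(s"read failed: $err"),
      bytes => println(s"read ${bytes.length} bytes"))
    ch.close()
  }
}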
fpinscala-muc/fpinscala-ivpdev
answers/src/main/scala/fpinscala/iomonad/Future.scala
Scala
mit
4,628
/* * Copyright (C) 2016 Nikos Katzouris * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package oled.weightlearn import app.runutils.Debug import com.typesafe.scalalogging.LazyLogging import logic.{Clause, Literal} import optimus.algebra.AlgebraOps._ import optimus.algebra.Expression import optimus.optimization._ import optimus.optimization.enums.{PreSolve, SolverLib} import optimus.optimization.model.MPFloatVar class MAPInference extends LazyLogging { implicit val problem = MPModel(SolverLib.LpSolve) /* Non-empty clauseIds are passed when rules are evaluated in parallel. * See also MLNClauseHandlingWorker: * * AdaGrad.adagrad(groundNetwork, x.clauses, trueGroundingsPerClause.toList, x.clauseIds.toList) * * and * * solver.infer(groundNetwork, clausesWithUpdatedWeights, x.clauseIds.toList) * */ def infer( groundNetwork: Vector[Literal], liftedClauses: Vector[Clause], clauseIds: List[Int] = Nil): Vector[Literal] = { val enumClauses = clauseIds match { case Nil => (1 to liftedClauses.length).toList case _ => clauseIds } val idsToRuleIdsMap = (enumClauses zip liftedClauses).toMap val sTranslation = System.currentTimeMillis() var literalLPVars = Map.empty[Int, MPFloatVar] var expressions: List[Expression] = Nil groundNetwork.zipWithIndex.foreach { case (_, idx) => literalLPVars += idx -> MPFloatVar(s"y$idx", 0, 1) } groundNetwork.zipWithIndex.foreach { case (lit, idx) => // Literal weight: val weight = idsToRuleIdsMap(lit.derivedFrom).weight val floatVar = literalLPVars(idx) if (!lit.isNAF && weight != 0) expressions ::= weight * floatVar if (lit.isNAF) add((1 - floatVar) >:= 1) } val eTranslation = System.currentTimeMillis() //logger.info("Translation time: " + (eTranslation - sTranslation)) // Step 4: Optimize function subject to the constraints introduced val solveTimed = utils.Utils.time{ maximize(sum(expressions)) start(PreSolve.CONSERVATIVE) release() } //logger.info("Solver time: " + solveTimed._2) Debug.totalILPSolverTime += solveTimed._2 var nonIntegralSolutionsCounter = 0 var fractionalSolutions = Vector.empty[Int] for ((id, lpVar) <- literalLPVars) { val value = lpVar.value.getOrElse { logger.error(s"There is no solution for variable '${lpVar.symbol}'") sys.exit() } val normalisedValue = if (value > 0.99) 1.0 else value if (normalisedValue != 0.0 && normalisedValue != 1.0) { nonIntegralSolutionsCounter += 1 fractionalSolutions +:= id } else { val currentAtom = groundNetwork(id) currentAtom.mlnTruthValue = if (normalisedValue == 0) false else true } } val sRoundUp = System.currentTimeMillis() if (nonIntegralSolutionsCounter > 0) { for (i <- fractionalSolutions.indices) { val id = fractionalSolutions(i) val currentAtom = groundNetwork(id) val weight = idsToRuleIdsMap(currentAtom.derivedFrom).weight if (currentAtom.mlnTruthValue && !currentAtom.isNAF && weight >= 0) currentAtom.mlnTruthValue = true else if (currentAtom.mlnTruthValue && !currentAtom.isNAF && weight < 0) currentAtom.mlnTruthValue = false else 
if (!currentAtom.mlnTruthValue && !currentAtom.isNAF && weight >= 0) currentAtom.mlnTruthValue = false else if (!currentAtom.mlnTruthValue && !currentAtom.isNAF && weight < 0) currentAtom.mlnTruthValue = true else if (currentAtom.isNAF) currentAtom.mlnTruthValue = false /* else if (currentAtom.mlnTruthValue && currentAtom.isNAF && weight >= 0) currentAtom.mlnTruthValue = false else if (currentAtom.mlnTruthValue && currentAtom.isNAF && weight < 0) currentAtom.mlnTruthValue = true else if (!currentAtom.mlnTruthValue && !currentAtom.isNAF && weight >= 0) currentAtom.mlnTruthValue = false else if (!currentAtom.mlnTruthValue && !currentAtom.isNAF && weight < 0) currentAtom.mlnTruthValue = true else if (!currentAtom.mlnTruthValue && currentAtom.isNAF && weight >= 0) currentAtom.mlnTruthValue = true else if (!currentAtom.mlnTruthValue && currentAtom.isNAF && weight < 0) currentAtom.mlnTruthValue = false*/ } } val eRoundUp = System.currentTimeMillis() //logger.info("Roundup time: " + (eRoundUp - sRoundUp)) groundNetwork } }
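// The fractional-solution roundup above reduces to a small decision table; a
// condensed restatement (illustrative function, not used by the class):
object RoundupSketch {
  // A non-NAF atom keeps its previous truth value iff its clause weight is
  // non-negative, and flips otherwise; NAF atoms are always rounded to false.
  def round(previous: Boolean, isNAF: Boolean, weight: Double): Boolean =
    if (isNAF) false else previous == (weight >= 0)
}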
nkatzz/OLED
src/main/scala/oled/weightlearn/MAPInference.scala
Scala
gpl-3.0
5,163
package unfiltered.response.link

object Param {

  /** Predefined parameter types as specified in
      [[http://tools.ietf.org/html/rfc5988#section-5 section-5]]. Note that
      `rev` is omitted as it has been deprecated by the specification. */
  sealed abstract class Type(val name: String)
  case object Rel extends Type("rel")
  case object Anchor extends Type("anchor")
  case object Hreflang extends Type("hreflang")
  case object Media extends Type("media")
  case object Title extends Type("title")
  case object TitleStar extends Type("title*")
  case object ContentType extends Type("type")
  /** The extension type supporting `link-extension` parameters. */
  private [link] final case class ExtensionType (override val name: String) extends Type(name)

  /** Construct an extension parameter. */
  def extension(paramType: String): String => Extension =
    value => Extension(ExtensionType(paramType), value)

  /** Extractor for parameter types that cannot repeat within a `Ref`. */
  object NonRepeatable {
    def unapply(param: Param) =
      param.paramType match {
        case Rel | Media | Title | TitleStar | ContentType => Some(param)
        case _ => None
      }
  }
}

/** Root type for all implementations of `link-param` as specified in
    [[https://tools.ietf.org/html/rfc5988#section-5 section-5]]. Predefined
    parameter values are specified in various documents linked or referred to
    in the [[https://tools.ietf.org/html/rfc5988 rfc5988]]; see [[Media]] and
    [[Rel]]. New parameter types can be added to `Link` headers as
    `link-extension` parameters. Extension parameters can be constructed via
    [[Param.extension]]. */
sealed abstract class Param(val paramType: Param.Type, val value: String)

final case class Anchor(uri: String) extends Param(Param.Anchor, uri)
final case class Hreflang(lang: String) extends Param(Param.Hreflang, lang)
final case class Title(title: String) extends Param(Param.Title, title)
final case class TitleStar(titleStar: String) extends Param(Param.TitleStar, titleStar)
final case class MediaType(typeName: String, subTypeName: String)
  extends Param(Param.ContentType, s"$typeName/$subTypeName")
final case class Extension private[link] (override val paramType: Param.ExtensionType, override val value: String)
  extends Param(paramType, value)

/** Target media types as described in
    [[https://tools.ietf.org/html/rfc5988#section-5.4 section-5.4]]. The
    meaning and set of possible values for this parameter are specified in the
    [[http://www.w3.org/TR/html401/types.html#h-6.13 HTML 401 Types]]
    specification. */
sealed abstract class Media(val mediaType: String) extends Param(Param.Media, mediaType) {
  /** According to [[http://www.w3.org/TR/html401/types.html#h-6.13 HTML 401 Types]],
      `Media` is a monoid resulting in the accumulated media for a single
      `media` parameter. */
  def :+(that: Media) = CompositeMedia(this, that)
  final override def toString = s"Media($mediaType)"
}
private [link] final case class CompositeMedia (a: Media, b: Media) extends Media(a.mediaType + ", " + b.mediaType)

case object Screen extends Media("screen")
case object Tty extends Media("tty")
case object Tv extends Media("tv")
case object Projection extends Media("projection")
case object Handheld extends Media("handheld")
case object Print extends Media("print")
case object Braille extends Media("braille")
case object Aural extends Media("aural")
case object All extends Media("all")

/** A link relation type as described in
    [[https://tools.ietf.org/html/rfc5988#section-5.3 section-5.3]].
The relation type is specified as link parameter for which a global set of possible values is catalogued at [[http://www.iana.org/assignments/link-relations/link-relations.xml]]. The specification also permits extension types to be provided as absolute URLs (see the [[ExtensionRel]] type). */ sealed abstract class Rel(val relType: String) extends Param(Param.Rel, relType) { /** According to [[https://tools.ietf.org/html/rfc5988#section-5.5 section-5.5]], `Rel` is a monoid resulting in the accumulated relation types for a single `rel` parameter. */ def :+(that: Rel) = CompositeRel(this, that) final override def toString = s"Rel($relType)" } private [link] final case class CompositeRel (a: Rel, b: Rel) extends Rel(a.relType + " " + b.relType) /** Support for extension relation types as specified in [[https://tools.ietf.org/html/rfc5988#section-4.2 section-4.2]] */ final case class ExtensionRel (uri: String) extends Rel(uri) /* The complete set of catalogued relation types. */ case object About extends Rel("about") case object Alternate extends Rel("alternate") case object Appendix extends Rel("appendix") case object Archives extends Rel("archives") case object Author extends Rel("author") case object Bookmark extends Rel("bookmark") case object Canonical extends Rel("canonical") case object Chapter extends Rel("chapter") case object Collection extends Rel("collection") case object Contents extends Rel("contents") case object Copyright extends Rel("copyright") case object CreateForm extends Rel("create-form") case object Current extends Rel("current") case object Describedby extends Rel("describedby") case object Describes extends Rel("describes") case object Disclosure extends Rel("disclosure") case object Duplicate extends Rel("duplicate") case object Edit extends Rel("edit") case object EditForm extends Rel("edit-form") case object EditMedia extends Rel("edit-media") case object Enclosure extends Rel("enclosure") case object First extends Rel("first") case object Glossary extends Rel("glossary") case object Help extends Rel("help") case object Hosts extends Rel("hosts") case object Hub extends Rel("hub") case object Icon extends Rel("icon") case object Index extends Rel("index") case object Item extends Rel("item") case object Last extends Rel("last") case object LatestVersion extends Rel("latest-version") case object License extends Rel("license") case object Lrdd extends Rel("lrdd") case object Memento extends Rel("memento") case object Monitor extends Rel("monitor") case object MonitorGroup extends Rel("monitor-group") case object Next extends Rel("next") case object NextArchive extends Rel("next-archive") case object Nofollow extends Rel("nofollow") case object Noreferrer extends Rel("noreferrer") case object Original extends Rel("original") case object Payment extends Rel("payment") case object PredecessorVersion extends Rel("predecessor-version") case object Prefetch extends Rel("prefetch") case object Prev extends Rel("prev") case object Preview extends Rel("preview") case object Previous extends Rel("previous") case object PrevArchive extends Rel("prev-archive") case object PrivacyPolicy extends Rel("privacy-policy") case object Profile extends Rel("profile") case object Related extends Rel("related") case object Replies extends Rel("replies") case object Search extends Rel("search") case object Section extends Rel("section") case object Self extends Rel("self") case object Service extends Rel("service") case object Start extends Rel("start") case object Stylesheet extends 
Rel("stylesheet") case object Subsection extends Rel("subsection") case object SuccessorVersion extends Rel("successor-version") case object Tag extends Rel("tag") case object TermsOfService extends Rel("terms-of-service") case object Timegate extends Rel("timegate") case object Timemap extends Rel("timemap") case object Type extends Rel("type") case object Up extends Rel("up") case object VersionHistory extends Rel("version-history") case object Via extends Rel("via") case object WorkingCopy extends Rel("working-copy") case object WorkingCopyOf extends Rel("working-copy-of")
beni55/unfiltered
library/src/main/scala/response/link/Param.scala
Scala
mit
7,787
package com.yetu.oauth2provider.browser import com.yetu.oauth2provider.controllers.setup.SetupController._ class SetupRegistrationBrowserSpec extends BaseBrowserSpec { val fullSetupRegistrationUrl = s"http://localhost:$port$setupRegistrationUrl" val fullSetupConfirmMailUrl = s"http://localhost:$port$setupConfirmMailUrl" val fullSetupConfirmedMailUrl = s"http://localhost:$port$setupConfirmedMailUrl" val fullSetupDownloadUrl = s"http://localhost:$port$setupDownloadUrl" val fullSetupConfirmedMailErrorUrl = s"http://localhost:$port$setupConfirmedMailErrorUrl" def fullMailTokenUrl(token: String) = { s"http://localhost:$port$setupConfirmedMailUrl/$token" } s"(Gateway-) Registration flow page at $setupRegistrationUrl" must { s"open $setupRegistrationUrl" in { go to fullSetupRegistrationUrl currentUrl mustEqual fullSetupRegistrationUrl } s"open $setupRegistrationUrl and have '$UserNotRegistered' as the default radio button selection" in { go to fullSetupRegistrationUrl radioButtonGroup(UserRegistrationStatus).value mustEqual UserNotRegistered } s"open $setupRegistrationUrl and have 'agreement[]' checkbox unselected" in { go to fullSetupRegistrationUrl checkbox("agreement").isSelected must be(false) } "go to download page when selecting already registered" in { go to fullSetupRegistrationUrl radioButtonGroup(UserRegistrationStatus).value = UserAlreadyRegistered submit() currentUrl mustEqual fullSetupDownloadUrl find(name("setupDownload")) must be('defined) } "not register without filling out forms and should give error messages on fields" in { go to fullSetupRegistrationUrl radioButtonGroup(UserRegistrationStatus).value = UserNotRegistered submit() currentUrl mustEqual fullSetupRegistrationUrl find(id("agreementErrorText")) must be ('defined) find(id("firstNameIDErrorText")) must be ('defined) find(id("lastNameIDErrorText")) must be ('defined) find(id("emailErrorText")) must be ('defined) find(id("password1IDErrorText")) must be ('defined) } "not register without having passwords, that match" in { go to fullSetupRegistrationUrl checkbox("agreement").select() register(browserTestUserPassword, testUserEmail, Some(s"$browserTestUserPassword other")) currentUrl mustEqual fullSetupRegistrationUrl find(id("password2IDErrorText")) must be ('defined) } "not register without accepting terms and conditions and stay on the same page" in { go to fullSetupRegistrationUrl checkbox("agreement").clear() register(browserTestUserPassword, testUserEmail) currentUrl mustEqual fullSetupRegistrationUrl find(id("agreementErrorText")) must be ('defined) } def createNewUserThroughGatewaySetupProcess() = { go to fullSetupRegistrationUrl checkbox("agreement").select() checkbox("agreement").isSelected must be(true) register(browserTestUserPassword, testUserEmail) } s"go to confirm mail page if registration is correct" in { createNewUserThroughGatewaySetupProcess() currentUrl mustEqual fullSetupConfirmMailUrl } s"go to confirmed mail page when clicking the link in the email" in { createNewUserThroughGatewaySetupProcess() go to fullMailTokenUrl(getMailTokenFromMemory) currentUrl mustEqual fullSetupConfirmedMailUrl } s"go to confirmed mail error page when token is wrong" in { go to s"$fullSetupConfirmedMailUrl/3485797250jdgs" currentUrl mustEqual fullSetupConfirmedMailErrorUrl } } }
yetu/oauth2-provider
test/com/yetu/oauth2provider/browser/SetupRegistrationBrowserSpec.scala
Scala
mit
3,697
/* * Copyright (c) 2014-2015 by its authors. Some rights reserved. * See the project homepage at: http://www.monifu.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monifu.reactive.channels import monifu.concurrent.Scheduler import monifu.concurrent.atomic.{Atomic, AtomicLong} import monifu.reactive.Ack.Continue import monifu.reactive.OverflowStrategy.Unbounded import monifu.reactive.Observer import monifu.reactive.exceptions.DummyException object AsyncChannelSuite extends BaseChannelSuite { def alreadyTerminatedTest(expectedElems: Seq[Long])(implicit s: Scheduler) = { val c = AsyncChannel[Long](Unbounded) Sample(c, expectedElems.lastOption.getOrElse(0)) } def continuousStreamingTest(expectedElems: Seq[Long])(implicit s: Scheduler) = None test("while active, keep adding subscribers, but don't emit anything") { implicit s => var wereCompleted = 0 var sum = 0L def createObserver = new Observer[Long] { def onNext(elem: Long) = { sum += elem Continue } def onError(ex: Throwable) = () def onComplete() = { wereCompleted += 1 } } val channel = AsyncChannel[Long](Unbounded) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.pushNext(10, 20, 30) s.tick() assertEquals(sum, 0) assertEquals(wereCompleted, 0) channel.pushComplete() channel.pushComplete() s.tick() assertEquals(sum, 30 * 3) assertEquals(wereCompleted, 3) channel.onSubscribe(createObserver) s.tick() assertEquals(sum, 30 * 4) assertEquals(wereCompleted, 4) } test("should interrupt on error without emitting anything") { implicit s => var wereCompleted = 0 var sum = 0L def createObserver = new Observer[Long] { def onNext(elem: Long) = { sum += elem Continue } def onComplete() = () def onError(ex: Throwable) = ex match { case DummyException("dummy1") => wereCompleted += 1 case _ => () } } val channel = AsyncChannel[Long](Unbounded) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.pushNext(10) channel.pushNext(20) channel.pushNext(30) s.tick() assertEquals(sum, 0) assertEquals(wereCompleted, 0) channel.pushError(DummyException("dummy1")) channel.pushError(DummyException("dummy2")) s.tick() assertEquals(sum, 0) assertEquals(wereCompleted, 3) channel.onSubscribe(createObserver) assertEquals(sum, 0) assertEquals(wereCompleted, 4) } test("should interrupt when empty") { implicit s => var wereCompleted = 0 var sum = 0L def createObserver = new Observer[Long] { def onNext(elem: Long) = { sum += elem Continue } def onComplete() = wereCompleted += 1 def onError(ex: Throwable) = () } val channel = AsyncChannel[Long](Unbounded) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.onSubscribe(createObserver) channel.pushComplete() s.tick() assertEquals(sum, 0) assertEquals(wereCompleted, 3) channel.onSubscribe(createObserver) s.tick() assertEquals(sum, 0) assertEquals(wereCompleted, 4) } }
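// A condensed restatement of the contract the tests above verify: an
// AsyncChannel buffers silently while active, and on pushComplete() each
// subscriber receives only the last pushed value. A sketch, using only calls
// already exercised in this suite:
object AsyncChannelContractSketch {
  def demo()(implicit s: Scheduler): Unit = {
    val channel = AsyncChannel[Long](Unbounded)
    channel.onSubscribe(new Observer[Long] {
      def onNext(elem: Long) = { println(s"received $elem"); Continue }
      def onError(ex: Throwable) = ()
      def onComplete() = println("completed")
    })
    channel.pushNext(10, 20, 30) // nothing is delivered yet
    channel.pushComplete()       // subscriber now receives 30, then onComplete
  }
}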
sergius/monifu
monifu/shared/src/test/scala/monifu/reactive/channels/AsyncChannelSuite.scala
Scala
apache-2.0
3,930
package scavlink.coord /** * Base trait for 3D coordinates. */ trait XYZ[T] { def x: T def y: T def z: T def toVector = Vector(x, y, z) override def toString = s"($x,$y,$z)" }
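// A minimal concrete instance of the trait above (hypothetical, for
// illustration only):
case class IntXYZ(x: Int, y: Int, z: Int) extends XYZ[Int]
// IntXYZ(1, 2, 3).toVector == Vector(1, 2, 3); toString == "(1,2,3)"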
nickolasrossi/scavlink
src/main/scala/scavlink/coord/XYZ.scala
Scala
mit
189
package ems.storage import scala.util.control.Exception.nonFatalCatch import com.mongodb.casbah.Imports._ import ems._ import security.User import java.util import util.UUID import model._ import org.joda.time.DateTime import com.mongodb.casbah.commons.MongoDBObject trait MongoDBStorage { import com.mongodb.casbah.commons.conversions.scala._ DeregisterJodaTimeConversionHelpers() def db: MongoDB def binary: BinaryStorage def getEvents() = db("event").find().sort(MongoDBObject("name" -> 1)).map(Event.apply).toList def getEventsWithSessionCount(user: User) = getEvents().map(e => EventWithSessionCount(e, e.id.map(id => getSessionCount(id)(user)).getOrElse(0))) def getEvent(id: String) = db("event").findOneByID(id).map(Event.apply) def getEventsBySlug(name: String) = db("event").find(MongoDBObject("slug" -> name)).sort(MongoDBObject("name" -> 1)).map(Event.apply).toList def saveEvent(event: Event): Either[Throwable, Event] = saveOrUpdate(event, (e: Event, update) => e.toMongo(update), db("event")) def getSlots(eventId: String, parent: Option[String] = None): Seq[Slot] = { val obj = MongoDBObject("eventId" -> eventId, "parentId" -> parent.orNull) db("slot").find(obj).map(Slot.apply).toVector } def getAllSlots(eventId: String): Seq[SlotTree] = getSlots(eventId).map(s => SlotTree(s, getSlots(s.eventId, s.id))) def getSlot(id: String): Option[Slot] = { db("slot").findOne(MongoDBObject("_id" -> id)).map(Slot.apply) } def saveSlot(slot: Slot): Either[Throwable, Slot] = { saveOrUpdate(slot, (s: Slot, update: Boolean) => s.toMongo, db("slot")) } def removeSlot(eventId: String, id: String): Either[Throwable, String] = Left(new UnsupportedOperationException()) def getRooms(eventId: String): Seq[Room] = getEvent(eventId).map(_.rooms).getOrElse(Nil) def getRoom(eventId: String, id: String): Option[Room] = getRooms(eventId).find(r => r.id.exists(_ == id)) def saveRoom(eventId: String, room: Room): Either[Throwable, Room] = Left(new UnsupportedOperationException()) def removeRoom(eventId: String, id: String): Either[Throwable, String] = Left(new UnsupportedOperationException()) def getSessions(eventId: String)(user: User) = { val query = MongoDBObject.newBuilder query += "eventId" -> eventId if (!user.authenticated) { query += "published" -> true } db("session").find(query.result()).sort(MongoDBObject("title" -> 1)).map(Session(_, this)).toList } def getSessionCount(eventId: String)(user: User): Int = { val query = MongoDBObject.newBuilder query += "eventId" -> eventId if (!user.authenticated) { query += "published" -> true } db("session").find(query.result(), MongoDBObject("_id" -> 1)).size } def getSessionsBySlug(eventId: String, slug: String) = db("session").find( MongoDBObject("eventId" -> eventId, "slug" -> slug) ).sort(MongoDBObject("abstract" -> MongoDBObject("title" -> 1))).map(Session(_, this)).toList def getSession(eventId: String, id: String) = db("session").findOne( MongoDBObject("_id" -> id, "eventId" -> eventId) ).map(Session(_, this)) def saveSession(session: Session) = saveOrUpdate(session, (s: Session, update) => s.toMongo(update), db("session")) def publishSessions(eventId: String, sessions: Seq[String]): Either[Throwable, Unit] = nonFatalCatch.either { val result = db("session").update(MongoDBObject("eventId" -> eventId, "_id" -> MongoDBObject("$in" -> sessions)), MongoDBObject("$set" -> MongoDBObject("published" -> true)), multi = true) () } def saveSlotInSession(eventId: String, sessionId: String, slot: Slot) = saveOrUpdate( getSession(eventId, sessionId).get, (s: Session, update) => 
MongoDBObject("$set" -> MongoDBObject("slotId" -> slot.id.get, "last-modified" -> DateTime.now.toDate)), db("session") ) def saveRoomInSession(eventId: String, sessionId: String, room: Room) = saveOrUpdate( getSession(eventId, sessionId).get, (s: Session, update) => MongoDBObject("$set" -> MongoDBObject("roomId" -> room.id.get, "last-modified" -> DateTime.now.toDate)), db("session") ) def saveAttachment(eventId: String, sessionId: String, attachment: URIAttachment) = nonFatalCatch.either { val withId = if (attachment.id.isDefined) attachment else attachment.withId(util.UUID.randomUUID().toString) val speakerId = withId.id.get val update = db("session").findOne( MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "attachments._id" -> speakerId), MongoDBObject() ).isDefined val dbObject: MongoDBObject = withId.toMongo val result = if (update) { val toSave = dbObject.foldLeft(MongoDBObject.newBuilder){case (mongo, (key, value)) => mongo += ("attachments.$." + key -> value) }.result() db("session").update( MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "attachments._id" -> speakerId), MongoDBObject("$set" -> toSave) ) } else { db("session").update( MongoDBObject("_id" -> sessionId, "eventId" -> eventId), MongoDBObject("$push" -> MongoDBObject("attachments" -> dbObject)) ) } withId } def removeAttachment(eventId: String, sessionId: String, id: String) = nonFatalCatch.either { val result = db("session").update( MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "attachments._id" -> id), MongoDBObject("$pull" -> MongoDBObject("attachments.$._id" -> id)) ) "OK" } def getSpeaker(eventId: String, sessionId: String, speakerId: String) = { getSession(eventId, sessionId).flatMap(_.speakers.find(_.id.exists(_ == speakerId))) } def saveSpeaker(eventId: String, sessionId: String, speaker: Speaker) = nonFatalCatch.either { val withId = if (speaker.id.isDefined) speaker else speaker.withId(util.UUID.randomUUID().toString) val speakerId = withId.id.get val update = db("session").findOne( MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "speakers._id" -> speakerId), MongoDBObject() ).isDefined val dbObject: MongoDBObject = withId.toMongo val result = if (update) { val toSave = dbObject.foldLeft(MongoDBObject.newBuilder){case (mongo, (key, value)) => mongo += ("speakers.$." 
+ key -> value)
    }.result()
      db("session").update(
        MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "speakers._id" -> speakerId),
        MongoDBObject("$set" -> toSave)
      )
    } else {
      db("session").update(
        MongoDBObject("_id" -> sessionId, "eventId" -> eventId),
        MongoDBObject("$push" -> MongoDBObject("speakers" -> dbObject))
      )
    }
    withId
  }

  def removeSession(sessionId: String): Either[Throwable, Unit] =
    nonFatalCatch.either(db("session").remove(MongoDBObject("_id" -> sessionId)))

  def updateSpeakerWithPhoto(eventId: String, sessionId: String, speakerId: String, photo: Attachment with Entity[Attachment]) = nonFatalCatch.either {
    val toSave = MongoDBObject("speakers.$.photo" -> photo.id.get)
    val result = db("session").update(
      MongoDBObject("_id" -> sessionId, "eventId" -> eventId, "speakers._id" -> speakerId),
      MongoDBObject("$set" -> toSave)
    )
    photo
  }

  def removeSpeaker(eventId: String, sessionId: String, speakerId: String): Either[Throwable, Unit] = nonFatalCatch.either {
    db("session").update(
      MongoDBObject("_id" -> sessionId, "eventId" -> eventId),
      // Pull from the "speakers" array: speakers are pushed under that key in
      // saveSpeaker above, so pulling from "sessions" could never match.
      MongoDBObject("$pull" -> MongoDBObject("speakers" -> MongoDBObject("_id" -> speakerId)))
    )
  }

  def importSession(session: Session): Either[Throwable, Session] = {
    val either = saveOrUpdate(session, (o: Session, update) => o.toMongo(update), db("session"), fromImport = true)
    session.speakers.foreach(sp => sp.photo.foreach(ph =>
      updateSpeakerWithPhoto(session.eventId, session.id.get, sp.id.get, ph).fold(
        ex => throw ex,
        _ => ()
      )))
    either
  }

  def importEvent(event: Event): Either[Throwable, Event] = {
    saveOrUpdate(event, (o: Event, update) => o.toMongo(update), db("event"), fromImport = true)
  }

  def getChangedSessions(from: DateTime)(implicit u: User): Seq[Session] = {
    val builder = MongoDBObject.newBuilder
    builder ++= ("last-modified" $gte from.toDate)
    if (!u.authenticated) {
      builder += "published" -> true
    }
    db("session").find(builder.result()).map(Session(_, this)).toSeq
  }

  def status(): String = {
    try {
      db.command(MongoDBObject("ping" -> 1)).get("ok").toString match {
        case "1.0" => "ok"
        case _ => "down"
      }
    } catch {
      case e: Exception => "down"
    }
  }

  def shutdown() {
    db.underlying.getMongo.close()
  }

  private def saveOrUpdate[A <: Entity[A]](entity: A, toMongoDBObject: (A, Boolean) => DBObject, coll: MongoCollection, fromImport: Boolean = false): Either[Throwable, A] = nonFatalCatch.either {
    val objectWithId = withId(entity)
    val update = if (fromImport) coll.findOne(MongoDBObject("_id" -> entity.id.get), MongoDBObject()).isDefined else entity.id.isDefined
    val toSave = toMongoDBObject(objectWithId, update)
    if (update) {
      coll.update(MongoDBObject("_id" -> entity.id.get), toSave)
    } else {
      coll.insert(toSave, WriteConcern.Safe)
    }
    objectWithId
  }

  private def delete[A <: Entity[A]](entity: A, coll: MongoCollection): Either[Throwable, Unit] = nonFatalCatch.either {
    entity.id.foreach{ id =>
      coll.remove(MongoDBObject("_id" -> id))
    }
  }

  private def withId[A <: Entity[A]](entity: A): A = {
    if (entity.id.isDefined) entity
    else entity.withId(UUID.randomUUID().toString)
  }
}
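// Standalone restatement of the insert-vs-update rule in saveOrUpdate above
// (names are illustrative; the trait decides per entity, not via this helper):
object SaveRuleSketch {
  // Regular saves update when the entity already carries an id; imports
  // instead check whether the id is already present in the collection.
  def isUpdate(id: Option[String], fromImport: Boolean, existsInDb: => Boolean): Boolean =
    if (fromImport) existsInDb else id.isDefined
}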
chrissearle/ems-redux
src/main/scala/ems/storage/MongoDBStorage.scala
Scala
apache-2.0
9,655
/* * @author Flavio Keller * * Copyright 2014 University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.signalcollect.sna.metrics import java.math.MathContext import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.SynchronizedBuffer import scala.math.BigDecimal import com.signalcollect.DataGraphVertex import com.signalcollect.ExecutionConfiguration import com.signalcollect.Graph import com.signalcollect.Vertex import com.signalcollect.configuration.ExecutionMode import com.signalcollect.sna.ComputationResults import com.signalcollect.sna.ExecutionResult import com.signalcollect.sna.constants.SignalCollectSNAConstants import com.signalcollect.DefaultEdge /** * Executes the calculation of the degree centrality values of a graph's vertices */ object PageRank { /** * Function responsible for the execution * @param graph: the parsed graph, instance of @see com.signalcollect.Graph * @return {@link com.signalcollect.sna.ExecutionResult} object */ def run(graph: Graph[Any, Any]): ExecutionResult = { /* This vertex is responsible for the calculation of the PageRank average of the graph */ val avgVertex = new AveragePageRankVertex(SignalCollectSNAConstants.avgVertexId) graph.addVertex(avgVertex) graph.foreachVertex((v: Vertex[Any, _, Any, Any]) => graph.addEdge(v.id, new AveragePageRankEdge(avgVertex.id))) graph.foreachVertex((v: Vertex[Any, _, Any, Any]) => graph.addEdge(avgVertex.id, new AveragePageRankEdge(v.id))) val execmode = ExecutionConfiguration(ExecutionMode.Synchronous) val stats = graph.execute(execmode) graph.awaitIdle var vertexArray = new ArrayBuffer[Vertex[Any, _, Any, Any]] with SynchronizedBuffer[Vertex[Any, _, Any, Any]] graph.foreachVertex(v => vertexArray += v) graph.shutdown new ExecutionResult(new ComputationResults(avgVertex.state, filterInteger(vertexArray)), vertexArray, stats) } /** * Function that creates an ordered Key-Value map out of the vertex array in order to have the PageRank values packaged in order */ def filterInteger(vertexArray: ArrayBuffer[Vertex[Any, _, Any, Any]]): java.util.Map[String, Object] = { var vertices = new java.util.TreeMap[String, Object] for (vertex <- vertexArray) { vertices.put(vertex.id.toString, vertex.state.asInstanceOf[java.lang.Double]) } vertices.remove(SignalCollectSNAConstants.avgVertexId) vertices } } /** * Represents a vertex of a PageRank graph, extends {@link com.signalcollect.DataGraphVertex} * @param id: the vertex' id */ class PageRankVertex(id: Any, dampingFactor: Double = 0.85) extends DataGraphVertex(id, 1 - dampingFactor) { type Signal = Tuple2[Any, Any] type State = Double /** * The collect function calculates the rank of this vertex based on the rank * received from neighbors and the damping factor. 
 */
  def collect: State = {
    val pageRankSignals = mostRecentSignalMap.filter(signal => !signal._1.equals(SignalCollectSNAConstants.avgVertexId)).values.toList
    var sum = 0.0
    if (pageRankSignals.isEmpty) {
      BigDecimal.valueOf(state).round(new MathContext(3)).toDouble
    } else {
      for (signal <- pageRankSignals) {
        sum += signal._2.asInstanceOf[Double]
      }
      BigDecimal.valueOf(1 - dampingFactor + dampingFactor * sum).round(new MathContext(3)).toDouble
    }
  }

  /**
   * @inheritDoc
   */
  override def scoreSignal: Double = {
    lastSignalState match {
      case None => 1
      case Some(oldState) => (state - oldState).abs
    }
  }
}

/**
 * Represents an edge of a PageRank graph, extends {@link com.signalcollect.DefaultEdge}
 * @param t: the target vertex' id
 */
class PageRankEdge(t: Any) extends DefaultEdge(t) {

  type Source = PageRankVertex

  /**
   * The signal function calculates how much rank the source vertex transfers:
   * it sends the source id and the source's rank to the target vertex.
   */
  def signal = {
    if (source.outgoingEdges.contains(SignalCollectSNAConstants.avgVertexId)) {
      val outweightsNoAvg = (source.sumOfOutWeights - 1)
      Tuple2(source.id, source.state * weight / outweightsNoAvg)
    } else {
      Tuple2(source.id, source.state * weight / source.sumOfOutWeights)
    }
  }
}

/**
 * Represents a vertex of a PageRank graph, which is concerned with calculating the average PageRank.
 * Extends {@link com.signalcollect.DataGraphVertex}
 * @param id: the vertex' id
 */
class AveragePageRankVertex(id: String) extends DataGraphVertex(id, 0.0) {

  type Signal = Tuple2[Any, Any]
  type State = Double

  /**
   * The collect function calculates the average PageRank value.
   * It takes the states of all incoming edges (except those with a {@link com.signalcollect.sna.metrics.AveragePageRankVertex} as source)
   * and calculates the average out of them.
   */
  def collect: State = {
    val pageRankSignals = mostRecentSignalMap.filter(signal => !signal._1.equals("Average")).values.toList
    var sum = 0.0
    for (signal <- pageRankSignals) {
      sum += signal._2.asInstanceOf[Double]
    }
    scala.math.BigDecimal.valueOf(sum / pageRankSignals.size.toDouble).round(new MathContext(3)).toDouble
  }
}

/**
 * Represents an edge of a PageRank graph, which is concerned with the calculation of the average PageRank.
 * Extends {@link com.signalcollect.DefaultEdge}
 * @param t: the target vertex' id
 */
class AveragePageRankEdge(t: Any) extends DefaultEdge(t) {

  type Source = DataGraphVertex[Any, Any]

  /**
   * The signal function passes the whole vertex object and the state of the source vertex to its target,
   * such that the target vertex is able to distinguish what type the source vertex has.
   */
  def signal = Tuple2(source, source.state)
}
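// The per-vertex update implemented by PageRankVertex.collect above, restated
// as a plain function for illustration: rank = (1 - d) + d * sum(signals),
// with d = 0.85 by default.
object PageRankUpdateSketch {
  def updatedRank(incomingRanks: Seq[Double], dampingFactor: Double = 0.85): Double =
    1 - dampingFactor + dampingFactor * incomingRanks.sum
}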
fkzrh/signal-collect-sna
src/main/scala/com/signalcollect/sna/metrics/PageRank.scala
Scala
apache-2.0
6,150
package me.reminisce.testutils import java.util.concurrent.TimeUnit import reactivemongo.api.Cursor import reactivemongo.api.collections.bson.BSONCollection import reactivemongo.bson.{BSONDocument, BSONDocumentReader} import scala.concurrent.Await import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.Duration /** * Contains methods to retrieve entities from a mongo collection. They will retry until something is found which matches * the check. If the check trivially returns true, it still ensures that something was found. */ object Retry { private val attemptsPermitted = 20 def find[T](collection: BSONCollection, selector: BSONDocument, attempts: Int) (check: T => Boolean)(implicit reader: BSONDocumentReader[T]): Option[T] = { Await.result[Option[T]](collection.find(selector).one[T], Duration(10, TimeUnit.SECONDS)) match { case Some(result) => if (check(result)) { Some(result) } else { attempt[Option[T]](attempts, None) { find(collection, selector, attempts + 1)(check) } } case None => attempt[Option[T]](attempts, None) { find(collection, selector, attempts + 1)(check) } } } private def attempt[T](attempts: Int, default: T)(block: => T): T = { if (attempts < attemptsPermitted) { Thread.sleep(200) block } else { default } } def findList[T](collection: BSONCollection, selector: BSONDocument, attempts: Int) (check: List[T] => Boolean)(implicit reader: BSONDocumentReader[T]): List[T] = { Await.result(collection.find(selector).cursor[T]().collect[List](maxDocs = -1, Cursor.DoneOnError[List[T]]()), Duration(10, TimeUnit.SECONDS)) match { case List() => attempt[List[T]](attempts, Nil) { findList(collection, selector, attempts + 1)(check) } case result => if (check(result)) { result } else { attempt[List[T]](attempts, Nil) { findList(collection, selector, attempts + 1)(check) } } } } }
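// A hedged usage sketch: wait until a document matching the selector also
// passes a predicate. The collection and field names are made up:
object RetryUsageSketch {
  import reactivemongo.api.collections.bson.BSONCollection
  import reactivemongo.bson._

  def awaitPositiveCount(collection: BSONCollection, userId: String): Option[BSONDocument] =
    Retry.find[BSONDocument](collection, BSONDocument("userId" -> userId), attempts = 0) { doc =>
      doc.getAs[Int]("count").exists(_ > 0)
    }
}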
reminisceme/stats
src/test/scala/me/reminisce/testutils/Retry.scala
Scala
apache-2.0
2,163
package chandu0101.scalajs.react.components package semanticui import chandu0101.macros.tojs.JSMacro import japgolly.scalajs.react._ import japgolly.scalajs.react.vdom.VdomElement import scala.scalajs.js import scala.scalajs.js.`|` /** * This file is generated - submit issues instead of PR against it */ case class SuiFormCheckbox( ref: js.UndefOr[String] = js.undefined, control: js.UndefOr[String | VdomElement] = js.undefined, value: js.UndefOr[String | Double | js.Array[String] | js.Array[Double]] = js.undefined, name: js.UndefOr[String] = js.undefined, key: js.UndefOr[String] = js.undefined, placeholder: js.UndefOr[String] = js.undefined, label: js.UndefOr[String | js.Object | VdomElement] = js.undefined, as: js.UndefOr[String | js.Function] = js.undefined ) { def apply() = { val props = JSMacro[SuiFormCheckbox](this) val component = JsComponent[js.Object, Children.None, Null](Sui.FormCheckbox) component(props) } }
rleibman/scalajs-react-components
core/src/main/scala/chandu0101/scalajs/react/components/semanticui/SuiFormCheckbox.scala
Scala
apache-2.0
989
package com.twitter.finagle.stats import com.twitter.common.metrics.Metrics import org.jboss.netty.handler.codec.http.HttpHeaders import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.FunSuite import scala.util.matching.Regex import com.twitter.finagle.http.{MediaType, Response, Request} import com.twitter.util.Await @RunWith(classOf[JUnitRunner]) class JsonExporterTest extends FunSuite { test("samples can be filtered") { val registry = Metrics.createDetached() val exporter = new JsonExporter(registry) { override lazy val statsFilterRegex: Option[Regex] = mkRegex("abc,ill_be_partially_matched.*") } val sample = Map[String, Number]( "jvm_uptime" -> 15.0, "abc" -> 42, "ill_be_partially_matched" -> 1 ) val filteredSample = exporter.filterSample(sample) assert(filteredSample.size == 1, "Expected 1 metric to pass through the filter. Found: " + filteredSample.size) assert(filteredSample.contains("jvm_uptime"), "Expected to find jvm_uptime metric in unfiltered samples") } test("empty regex filter string should not result in a regex") { val registry = Metrics.createDetached() val exporter = new JsonExporter(registry) assert(exporter.mkRegex("").isEmpty, "Empty regex filter should result in no filter regex generated") } test("end-to-end fetching stats works") { val registry = Metrics.createDetached() val viewsCounter = registry.createCounter("views") val gcCounter = registry.createCounter("jvm_gcs") viewsCounter.increment() gcCounter.increment() val exporter = new JsonExporter(registry) { override lazy val statsFilterRegex: Option[Regex] = mkRegex("jvm.*,vie") } val requestFiltered = Request("/admin/metrics.json?filtered=1&pretty=0") val responseFiltered = Response(Await.result(exporter.apply(requestFiltered))).contentString assert(responseFiltered.contains("views"), "'Views' should be present - 'vie' is not a match") assert(! responseFiltered.contains("jvm_gcs"), "'jvm_gcs' should be present - jvm.* matches it") val requestUnfiltered = Request("/admin/metrics.json") val responseUnfiltered = Response(Await.result(exporter.apply(requestUnfiltered))) assert(MediaType.Json.equals(responseUnfiltered.headers().get(HttpHeaders.Names.CONTENT_TYPE))) val responseUnfilteredContent = responseUnfiltered.contentString assert(responseUnfilteredContent.contains("views"), "'Views' should be present - 'vie' is not a match") assert(responseUnfilteredContent.contains("jvm_gcs"), "'jvm_gcs' should be present - jvm.* matches it") } }
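// A stand-in sketch of the comma-separated filter convention the tests above
// rely on (the real mkRegex lives in JsonExporter; this only illustrates the
// assumed semantics of full-match alternation):
object FilterRegexSketch {
  def mk(commaSeparated: String): Option[Regex] =
    if (commaSeparated.isEmpty) None
    else Some(commaSeparated.split(",").mkString("(", ")|(", ")").r)

  // mk("jvm.*,vie").exists(_.pattern.matcher("jvm_gcs").matches) // true: filtered out
  // mk("jvm.*,vie").exists(_.pattern.matcher("views").matches)   // false: "vie" only partially matches
}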
LithiumTD/finagle
finagle-stats/src/test/scala/com/twitter/finagle/stats/JsonExporterTest.scala
Scala
apache-2.0
2,648
package notebook.util import java.io.File class ZipFileWriter(val name: String, val tmpDir: File = { new File(sys.props("java.io.tmpdir"), "zipfile-"+System.nanoTime) }) { val zipFile = new File(tmpDir, name) zipFile.createNewFile protected val baos = new java.io.FileOutputStream(zipFile) val zip = new java.util.zip.ZipOutputStream(baos) /** * e.g. * write(files = List(sameFile), prefix = "") * write(files, "images/") */ def write(files: Seq[File], prefix:String): Unit = { files.foreach { f => zip.putNextEntry(new java.util.zip.ZipEntry(prefix+f.getName)) val in = new java.io.BufferedInputStream(new java.io.FileInputStream(f)) var b = in.read() while (b > -1) { zip.write(b) b = in.read() } in.close() zip.closeEntry() } } def close(): Unit = { zip.close() } }
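// A hedged end-to-end sketch of the usage documented above; the images
// directory is hypothetical:
object ZipFileWriterSketch {
  def main(args: Array[String]): Unit = {
    val writer = new ZipFileWriter("report.zip")
    val images = Option(new File("images").listFiles()).map(_.toSeq).getOrElse(Seq.empty)
    writer.write(images, prefix = "images/")
    writer.close()
    println(s"wrote ${writer.zipFile.getAbsolutePath}")
  }
}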
andypetrella/spark-notebook
modules/common/src/main/scala/notebook/util/ZipFileWriter.scala
Scala
apache-2.0
898
package mlib.impl.protocol import mlib.api.Message import play.api.libs.json._ import mlib.api.{SystemChannels => Chan} import mlib.impl.protocol.MessageFields._ object ApplicationMessage { import java.util.UUID abstract class Bridge(val channel: Message.ChannelType, val content: JsValue, val msgId: String = UUID.randomUUID().toString) extends Message // input case class Untyped(channel: Message.ChannelType, content: JsValue, msgId: String = UUID.randomUUID().toString) extends Message val UntypedFormat = Json.format[Untyped] // system case class SystemError(code: Int, message: Option[String]) extends Bridge(Chan.SYSTEM, Json.toJson(Content.SystemError(code, message))) implicit object SystemErrorFormat extends Writes[SystemError] { def writes(o: SystemError) = Json.obj( CHANNEL -> o.channel, MSG_ID -> o.msgId, CONTENT -> Json.obj( "code" -> o.code, "message" -> o.message ) ) } case class ConnectionEvent(event: String, connectionId: Message.ConnectionId) extends Bridge(Chan.CONNECTION, Json.toJson(Content.ConnectionEvent(event, connectionId))) implicit object ConnectionEventFormat extends Writes[ConnectionEvent] { def writes(o: ConnectionEvent) = Json.obj( CHANNEL -> o.channel, MSG_ID -> o.msgId, CONTENT -> Json.obj( "event" -> o.event, "connectionId" -> o.connectionId ) ) } }
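// Sketch of the wire shape produced by SystemErrorFormat above; msgId is a
// fresh UUID and the channel value comes from SystemChannels.SYSTEM:
object WireShapeSketch {
  import play.api.libs.json.Json

  def demo(): Unit = {
    val err = ApplicationMessage.SystemError(500, Some("boom"))
    // e.g. {"channel":"...","msgId":"<uuid>","content":{"code":500,"message":"boom"}}
    println(Json.toJson(err)(ApplicationMessage.SystemErrorFormat))
  }
}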
InnovaCo/mlib
src/main/scala/mlib/impl/protocol/ApplicationMessage.scala
Scala
mit
1,437
/* * Copyright 2014–2018 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar package connector import quasar.contrib.pathy._ import scalaz._ final case class PhysicalPlan[R](repr: R, paths: ISet[APath])
jedesah/Quasar
connector/src/main/scala/quasar/connector/PhysicalPlan.scala
Scala
apache-2.0
749
package scala.idx.Genesys.Domain import scala.idx.Genesys._ import scala.xml._ import scala.io._ import java.io.File import scala.io.Source import java.io.FileReader import java.sql.{DriverManager,DatabaseMetaData} abstract class ObjDef(val name: String) class FieldDef(override val name: String, val typeValue: String,val length:Option[Int]=None,val nullable:Option[Boolean]=None,val primary:Option[Boolean]=None,val foreignKey:Option[String]=None) extends ObjDef(name) class EntityDef(override val name: String, val label: String) extends ObjDef(name) { private var fields: List[FieldDef] = List() private var _fieldsMap: Map[String, FieldDef] = Map[String, FieldDef]() def addField(fieldMap: (String, FieldDef)) = { _fieldsMap += fieldMap } def getField(field: String): FieldDef = { return this._fieldsMap(field) } def getFields(): List[FieldDef] = this._fieldsMap.map(t => t._2).toList } trait SourceParser{ def getEntity(sourceModel:Any):EntityDef=new EntityDef("","") } trait XmlParser extends SourceParser{ override def getEntity(sourceModel:Any):EntityDef={ val model=XML.loadString(sourceModel.asInstanceOf[String]) val name = (model \\ "entity" \\ "name").text val label = (model \\ "entity" \\ "label").text var entity = new EntityDef(name, label) (model \\ "entity" \\ "fields" \\ "field").foreach { field => var fieldType = (field \ "@type").text entity.addField(field.text -> new FieldDef(field.text, fieldType)) } println("loading xml-parser..") return entity } } trait DbParser extends SourceParser{ private def getPrimaryKey(table:String,metadata:DatabaseMetaData):Set[String]={ var resultPks=metadata.getPrimaryKeys(null, null, table) var pks=Set.empty[String] while(resultPks.next()){ pks=pks.+(resultPks.getString("COLUMN_NAME")) } return pks } private def getForeingKeys(table:String,metadata:DatabaseMetaData):Map[String,String]={ var resultFK=metadata.getImportedKeys(null, null, table); var fks=Map.empty[String,String]//Set.empty[String]; while (resultFK.next()){ val tablePKName=resultFK.getString("PKTABLE_NAME"); val fkColumnName=resultFK.getString("FKCOLUMN_NAME"); fks=fks + (fkColumnName -> tablePKName) } return fks } override def getEntity(sourceModel:Any):EntityDef={ val model=sourceModel.asInstanceOf[(String, DatabaseMetaData)] val tableName=model._1 val metadata=model._2 var entity = new EntityDef(tableName, tableName) var columnsResult=metadata.getColumns(null, null, tableName, null) val pks=getPrimaryKey(tableName,metadata) val fkMap=getForeingKeys(tableName, metadata); //println("foreing keys==>"+Map); while (columnsResult.next()) { entity.addField( columnsResult.getString("COLUMN_NAME")->new FieldDef( name=columnsResult.getString("COLUMN_NAME"), typeValue=columnsResult.getString("TYPE_NAME").split(" ")(0),//put this fix to avoid extra type info comming from the db length=Option(columnsResult.getInt("COLUMN_SIZE")), nullable=Option(columnsResult.getInt("NULLABLE")==DatabaseMetaData.columnNullable), primary=Option(pks.contains(columnsResult.getString("COLUMN_NAME"))), foreignKey=fkMap.get(columnsResult.getString("COLUMN_NAME")) ) ) } return entity } } object EntityReader{ import scala.collection.JavaConverters._ import scala.idx.Genesys.Util._ def apply(config:Config,ents: java.util.List[String]=null):List[EntityDef]={ //println("enter to Reader and type is==>"+config) var entityList:List[EntityDef]=List() config match{ case XmlSource(folder)=>{ var files=new File(folder).listFiles().filter { _.getName().endsWith(".xml") } for(file <- files){ val fileReader=Source.fromFile(file) val 
parser=new EntityParser(fileReader.mkString) with XmlParser entityList=entityList.::(parser.parse())//call the entityParser with the xmlParser fileReader.close() } } case DbSource(host,user,pwd,dbType)=>{ if(!(CommonUtil.dbDriverMapping.contains(dbType))){ throw new Exception("Invalid db type") } //println("loading driver") Class.forName(CommonUtil.dbDriverMapping(dbType)) println(s"the db host url =>$host") var connection=DriverManager.getConnection(host,user,pwd); if(user=="" || user==null){ connection=DriverManager.getConnection(host) } val metadata=connection.getMetaData var tableResult=metadata.getTables(null, null, null, Array("TABLE")) while (tableResult.next()) { val model=(tableResult.getString("TABLE_NAME"),metadata) //filtering the table to generate if((ents!=null && ents.asScala.exists { x => x==model._1 }) || ents==null){ val parser=new EntityParser(model) with DbParser entityList=entityList.::(parser.parse())//call the entityParser with the dbParse } } } case _ => new Exception("error not a valid source") } return entityList } private class EntityParser(source:Any) extends SourceParser { def parse():EntityDef={ //println("initialize entity parser") return this.getEntity(source) } } }
serdna27/Genesys
src/main/scala/scala/idx/Genesys/Domain/ObjDef.scala
Scala
mit
5,516
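A usage sketch, assuming XmlSource is the Config case matched above and that "models/" is a hypothetical folder of entity XML files:

val entities = EntityReader(XmlSource("models/"))
entities.foreach { e =>
  val cols = e.getFields().map(f => s"${f.name}: ${f.typeValue}").mkString(", ")
  println(s"${e.name} ($cols)")
}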
package controllers

/**
 * Created by hadoop on 16-7-24.
 */
import play.api.mvc._
import javax.inject._
import play.api._
import play.api.data._
import play.api.data.Forms._
import dao.DAO
import model.RegistrationInfo

@Singleton
class RegistrationController @Inject() (implicit dao: DAO) extends Controller {

  val signupForm = Form(mapping(
    "username" -> nonEmptyText,
    "password" -> nonEmptyText,
    "e_mail" -> nonEmptyText
  )(RegistrationInfo.apply)(RegistrationInfo.unapply))

  def mainPage = Action { implicit request =>
    Ok(views.html.registrate())
  }

  def postPage = Action { implicit request =>
    // fold instead of .get: a failed bind no longer throws at runtime
    signupForm.bindFromRequest.fold(
      formWithErrors => BadRequest("Oops, Your registration has failed"),
      info =>
        if (info.validate && !info.checkIfexist()) {
          info.storeToDB()
          Redirect("/").withNewSession
        } else {
          Unauthorized("Oops, Your registration has failed")
        }
    )
  }
}
GreenHunan/aircheck-server
app/controllers/RegistrationController.scala
Scala
gpl-2.0
853
/*
 Copyright 2009 David Hall, Daniel Ramage

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
*/
package epic.preprocess

/**
 * Tokenizes by splitting on the regular expression \s+.
 *
 * @author dramage
 */
class WhitespaceTokenizer() extends RegexSplitTokenizer("\\s+");

object WhitespaceTokenizer {
  def apply() : WhitespaceTokenizer = new WhitespaceTokenizer;

  private val _instance : WhitespaceTokenizer = apply();

  def apply(in : String) : Iterable[String] = _instance(in);
}
followyourheart/epic
src/main/scala/epic/preprocess/WhitespaceTokenizer.scala
Scala
apache-2.0
972
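Usage is plain function application through the companion's apply, which reuses a single shared instance:

val tokens = WhitespaceTokenizer("the quick\tbrown  fox")
// Iterable("the", "quick", "brown", "fox"): runs of whitespace collapse into one split point.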
object X { class CA[A] type C = CA[_] val c: C = ??? def f[A](r: CA[A]) = () def g(): CA[_] = CA() def h(): C = ??? // works f(c) // works val x = c.asInstanceOf[C] f(x) // was: error f(c.asInstanceOf[C]) // works, error in Scala 2 f(c.asInstanceOf[c.type]) f(c.asInstanceOf[CA[_]]) f(g()) f(h()) }
dotty-staging/dotty
tests/pos/i12739.scala
Scala
apache-2.0
341
import java.util.concurrent._ import java.util.concurrent.atomic._ object Test { def main(args: Array[String]) { val n = 10000 val i = new AtomicInteger(n) val j = new AtomicInteger(n) val sum = new AtomicInteger val q = new scala.concurrent.SyncVar[Int] val producers = (1 to 3) map { z => new Thread { override def run() { var again = true while (again) { val x = i.getAndDecrement() if (x > 0) q put x else again = false } } } } val summers = (1 to 7) map { z => new Thread { override def run() { val x = j.decrementAndGet() if (x >= 0) { sum addAndGet q.take() } if (x > 0) { run() } else { // done } } } } summers foreach { _.start() } producers foreach { _.start() } summers foreach { _.join() } val got = sum.get val expected = (n + 1) * n / 2 println(got + " " + expected + " " + (got == expected)) producers foreach { _.join() } } } // vim: set ts=2 sw=2 et:
felixmulder/scala
test/files/jvm/sync-var.scala
Scala
bsd-3-clause
975
package system.master.core import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import com.actors.CustomActor import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import system.names.NamingSystem import system.ontologies.messages.Location._ import system.ontologies.messages.MessageType.Update import system.ontologies.messages._ /** * Created by Xander_C on 09/07/2017. */ class DataStreamerTest extends TestKit(ActorSystem("DataStreamerTest")) with ImplicitSender with WordSpecLike with Matchers with BeforeAndAfterAll { override def afterAll { TestKit.shutdownActorSystem(system) } "A DataStreamer " should { val probe = TestProbe() val tester: TestActorRef[Tester] = TestActorRef(Props(new Tester(probe.ref)), "Tester") "Submit incoming collection to the Hot Stream" in { tester ! List.empty[Room] probe.expectMsg("I'm the hot stream") } "The submitted data should be delivered to the Admin Manager" in { probe.expectMsg(AriadneMessage( Update, Update.Subtype.Admin, Location.Master >> Location.User, AdminUpdate(0, List.empty[Room].map(c => RoomDataUpdate(c))) )) assert(probe.sender == tester.underlyingActor.admin) } } private class Tester(probe: ActorRef) extends CustomActor { val admin: TestActorRef[CustomActor] = TestActorRef(Props(new CustomActor { override def receive: Receive = { case msg => probe ! msg } }), self, NamingSystem.AdminSupervisor) val streamer: TestActorRef[TopologySupervisor] = TestActorRef(Props(new DataStreamer( target = child(NamingSystem.AdminSupervisor).get, (msg, dest) => { probe ! "I'm the hot stream" dest ! msg }) ), self, NamingSystem.DataStreamer) override def receive: Receive = { case msg if sender == streamer => probe forward msg case msg => streamer forward msg } } }
albertogiunta/arianna
src/test/scala/system/master/core/DataStreamerTest.scala
Scala
gpl-3.0
2,332
package org.cloudfun.util

import javax.swing.{JTextField, JTextArea, JFrame}
import javax.swing.text.{SimpleAttributeSet, Document}
import java.awt.event.{ActionEvent, ActionListener}
import java.awt.{Dimension, BorderLayout}

/**
 * Simple console with output lines and an input line.
 */
class SimpleConsole(name: String, listener: String => Unit) extends JFrame(name) {

  private val field: JTextArea = new JTextArea()
  private val input: JTextField = new JTextField()

  field.setEditable(false)

  getContentPane.setLayout(new BorderLayout())
  setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
  getContentPane.add(field, BorderLayout.CENTER)
  getContentPane.add(input, BorderLayout.SOUTH)
  getContentPane.setPreferredSize(new Dimension(800, 600))
  pack()
  setVisible(true)
  input.grabFocus()

  input.addActionListener(new ActionListener() {
    def actionPerformed(e: ActionEvent) = {
      val in = input.getText
      if (in != null && !in.isEmpty) listener(in)
      input.setText("")
    }
  })

  def addLine(message: String) {
    val doc: Document = field.getDocument
    doc.insertString(doc.getLength, message + "\n", SimpleAttributeSet.EMPTY)
  }
}
zzorn/cloudfun
src/main/scala/org/cloudfun/util/SimpleConsole.scala
Scala
lgpl-3.0
1,173
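A minimal echo demo wiring the input listener back into addLine; the lazy val lets the listener closure refer to the console it belongs to, since it is only forced after construction:

object EchoDemo {
  def main(args: Array[String]): Unit = {
    lazy val console: SimpleConsole =
      new SimpleConsole("Echo", line => console.addLine("> " + line))
    console.addLine("Type a line and press Enter.")  // forces initialization
  }
}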
package io.udash.bootstrap.modal import io.udash._ import io.udash.bindings.modifiers.Binding import io.udash.bootstrap.utils.BootstrapStyles import io.udash.testing.UdashCoreFrontendTest import io.udash.wrappers.jquery._ import scalatags.JsDom.all._ class UdashModalTest extends UdashCoreFrontendTest { "UdashModal component" should { val header = (_: Binding.NestedInterceptor) => div("Header ").render val body = (_: Binding.NestedInterceptor) => div("Body ").render val footer = (_: Binding.NestedInterceptor) => div("Footer ").render "call listeners on opening and closing" in { import UdashModal._ // fade needs to be false to make this test work in a synchronous way val modal = UdashModal(fade = false.toProperty)(Some(header), Some(body), Some(footer)) var showCounter = 0 var hideCounter = 0 modal.listen { case ModalEvent(_, ModalEvent.EventType.Show) => showCounter += 1 case ModalEvent(_, ModalEvent.EventType.Hide) => hideCounter += 1 } jQ("body").append(modal.render) for (i <- 1 to 10) { modal.show() showCounter should be(i) hideCounter should be(i-1) modal.hide() showCounter should be(i) hideCounter should be(i) } } "draw only passed elements" in { val modal = UdashModal()( Some(header), Some(body), Some(footer) ) val modal1 = UdashModal()( None, Some(body), Some(footer) ) val modal2 = UdashModal()( Some(header), None, Some(footer) ) val modal3 = UdashModal()( Some(header), Some(body), None ) modal.render.textContent should be("Header Body Footer ") modal1.render.textContent.split(" ") shouldNot contain("Header") modal1.render.textContent.split(" ") should contain("Body") modal1.render.textContent.split(" ") should contain("Footer") modal2.render.textContent.split(" ") should contain("Header") modal2.render.textContent.split(" ") shouldNot contain("Body") modal2.render.textContent.split(" ") should contain("Footer") modal3.render.textContent.split(" ") should contain("Header") modal3.render.textContent.split(" ") should contain("Body") modal3.render.textContent.split(" ") shouldNot contain("Footer") } "clean up listeners properly" in { val modalSize: Property[Option[BootstrapStyles.Size]] = Property(Some(BootstrapStyles.Size.Large)) val fade: Property[Boolean] = Property(false) val labelId: Property[Option[String]] = Property(Some("Test")) val backdrop: Property[UdashModal.BackdropType] = Property(UdashModal.BackdropType.None) val keyboard: Property[Boolean] = Property(true) val modal = UdashModal(modalSize, fade, labelId, backdrop, keyboard)(Some(header), Some(body), Some(footer)) val el = modal.render el.firstElementChild.classList should contain("modal-lg") el.firstElementChild.classList shouldNot contain("modal-sm") modalSize.set(Some(BootstrapStyles.Size.Small)) el.firstElementChild.classList shouldNot contain("modal-lg") el.firstElementChild.classList should contain("modal-sm") el.classList shouldNot contain("fade") fade.set(true) el.classList should contain("fade") el.getAttribute(aria.labelledby.name) should be("Test") labelId.set(None) el.getAttribute(aria.labelledby.name) should be(null) modal.kill() modalSize.listenersCount() should be(0) fade.listenersCount() should be(0) labelId.listenersCount() should be(0) backdrop.listenersCount() should be(0) keyboard.listenersCount() should be(0) } } }
UdashFramework/udash-core
bootstrap4/.js/src/test/scala/io/udash/bootstrap/modal/UdashModalTest.scala
Scala
apache-2.0
3,793
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.memory import javax.annotation.concurrent.GuardedBy import org.apache.spark.SparkConf import org.apache.spark.internal.Logging import org.apache.spark.storage.BlockId import org.apache.spark.storage.memory.MemoryStore import org.apache.spark.unsafe.Platform import org.apache.spark.unsafe.array.ByteArrayMethods import org.apache.spark.unsafe.memory.MemoryAllocator /** * An abstract memory manager that enforces how memory is shared between execution and storage. * * In this context, execution memory refers to that used for computation in shuffles, joins, * sorts and aggregations, while storage memory refers to that used for caching and propagating * internal data across the cluster. There exists one MemoryManager per JVM. */ private[spark] abstract class MemoryManager( conf: SparkConf, numCores: Int, onHeapStorageMemory: Long, onHeapExecutionMemory: Long) extends Logging { // -- Methods related to memory allocation policies and bookkeeping ------------------------------ @GuardedBy("this") protected val onHeapStorageMemoryPool = new StorageMemoryPool(this, MemoryMode.ON_HEAP) @GuardedBy("this") protected val offHeapStorageMemoryPool = new StorageMemoryPool(this, MemoryMode.OFF_HEAP) @GuardedBy("this") protected val onHeapExecutionMemoryPool = new ExecutionMemoryPool(this, MemoryMode.ON_HEAP) @GuardedBy("this") protected val offHeapExecutionMemoryPool = new ExecutionMemoryPool(this, MemoryMode.OFF_HEAP) onHeapStorageMemoryPool.incrementPoolSize(onHeapStorageMemory) onHeapExecutionMemoryPool.incrementPoolSize(onHeapExecutionMemory) protected[this] val maxOffHeapMemory = conf.getSizeAsBytes("spark.memory.offHeap.size", 0) protected[this] val offHeapStorageMemory = (maxOffHeapMemory * conf.getDouble("spark.memory.storageFraction", 0.5)).toLong offHeapExecutionMemoryPool.incrementPoolSize(maxOffHeapMemory - offHeapStorageMemory) offHeapStorageMemoryPool.incrementPoolSize(offHeapStorageMemory) /** * Total available on heap memory for storage, in bytes. This amount can vary over time, * depending on the MemoryManager implementation. * In this model, this is equivalent to the amount of memory not occupied by execution. */ def maxOnHeapStorageMemory: Long /** * Total available off heap memory for storage, in bytes. This amount can vary over time, * depending on the MemoryManager implementation. */ def maxOffHeapStorageMemory: Long /** * Set the [[MemoryStore]] used by this manager to evict cached blocks. * This must be set after construction due to initialization ordering constraints. 
*/ final def setMemoryStore(store: MemoryStore): Unit = synchronized { onHeapStorageMemoryPool.setMemoryStore(store) offHeapStorageMemoryPool.setMemoryStore(store) } /** * Acquire N bytes of memory to cache the given block, evicting existing ones if necessary. * * @return whether all N bytes were successfully granted. */ def acquireStorageMemory(blockId: BlockId, numBytes: Long, memoryMode: MemoryMode): Boolean /** * Acquire N bytes of memory to unroll the given block, evicting existing ones if necessary. * * This extra method allows subclasses to differentiate behavior between acquiring storage * memory and acquiring unroll memory. For instance, the memory management model in Spark * 1.5 and before places a limit on the amount of space that can be freed from unrolling. * * @return whether all N bytes were successfully granted. */ def acquireUnrollMemory(blockId: BlockId, numBytes: Long, memoryMode: MemoryMode): Boolean /** * Try to acquire up to `numBytes` of execution memory for the current task and return the * number of bytes obtained, or 0 if none can be allocated. * * This call may block until there is enough free memory in some situations, to make sure each * task has a chance to ramp up to at least 1 / 2N of the total memory pool (where N is the # of * active tasks) before it is forced to spill. This can happen if the number of tasks increase * but an older task had a lot of memory already. */ private[memory] def acquireExecutionMemory( numBytes: Long, taskAttemptId: Long, memoryMode: MemoryMode): Long /** * Release numBytes of execution memory belonging to the given task. */ private[memory] def releaseExecutionMemory( numBytes: Long, taskAttemptId: Long, memoryMode: MemoryMode): Unit = synchronized { memoryMode match { case MemoryMode.ON_HEAP => onHeapExecutionMemoryPool.releaseMemory(numBytes, taskAttemptId) case MemoryMode.OFF_HEAP => offHeapExecutionMemoryPool.releaseMemory(numBytes, taskAttemptId) } } /** * Release all memory for the given task and mark it as inactive (e.g. when a task ends). * * @return the number of bytes freed. */ private[memory] def releaseAllExecutionMemoryForTask(taskAttemptId: Long): Long = synchronized { onHeapExecutionMemoryPool.releaseAllMemoryForTask(taskAttemptId) + offHeapExecutionMemoryPool.releaseAllMemoryForTask(taskAttemptId) } /** * Release N bytes of storage memory. */ def releaseStorageMemory(numBytes: Long, memoryMode: MemoryMode): Unit = synchronized { memoryMode match { case MemoryMode.ON_HEAP => onHeapStorageMemoryPool.releaseMemory(numBytes) case MemoryMode.OFF_HEAP => offHeapStorageMemoryPool.releaseMemory(numBytes) } } /** * Release all storage memory acquired. */ final def releaseAllStorageMemory(): Unit = synchronized { onHeapStorageMemoryPool.releaseAllMemory() offHeapStorageMemoryPool.releaseAllMemory() } /** * Release N bytes of unroll memory. */ final def releaseUnrollMemory(numBytes: Long, memoryMode: MemoryMode): Unit = synchronized { releaseStorageMemory(numBytes, memoryMode) } /** * Execution memory currently in use, in bytes. */ final def executionMemoryUsed: Long = synchronized { onHeapExecutionMemoryPool.memoryUsed + offHeapExecutionMemoryPool.memoryUsed } /** * Storage memory currently in use, in bytes. */ final def storageMemoryUsed: Long = synchronized { onHeapStorageMemoryPool.memoryUsed + offHeapStorageMemoryPool.memoryUsed } /** * Returns the execution memory consumption, in bytes, for the given task. 
*/ private[memory] def getExecutionMemoryUsageForTask(taskAttemptId: Long): Long = synchronized { onHeapExecutionMemoryPool.getMemoryUsageForTask(taskAttemptId) + offHeapExecutionMemoryPool.getMemoryUsageForTask(taskAttemptId) } // -- Fields related to Tungsten managed memory ------------------------------------------------- /** * Tracks whether Tungsten memory will be allocated on the JVM heap or off-heap using * sun.misc.Unsafe. */ final val tungstenMemoryMode: MemoryMode = { if (conf.getBoolean("spark.memory.offHeap.enabled", false)) { require(conf.getSizeAsBytes("spark.memory.offHeap.size", 0) > 0, "spark.memory.offHeap.size must be > 0 when spark.memory.offHeap.enabled == true") require(Platform.unaligned(), "No support for unaligned Unsafe. Set spark.memory.offHeap.enabled to false.") MemoryMode.OFF_HEAP } else { MemoryMode.ON_HEAP } } /** * The default page size, in bytes. * * If user didn't explicitly set "spark.buffer.pageSize", we figure out the default value * by looking at the number of cores available to the process, and the total amount of memory, * and then divide it by a factor of safety. */ val pageSizeBytes: Long = { val minPageSize = 1L * 1024 * 1024 // 1MB val maxPageSize = 64L * minPageSize // 64MB val cores = if (numCores > 0) numCores else Runtime.getRuntime.availableProcessors() // Because of rounding to next power of 2, we may have safetyFactor as 8 in worst case val safetyFactor = 16 val maxTungstenMemory: Long = tungstenMemoryMode match { case MemoryMode.ON_HEAP => onHeapExecutionMemoryPool.poolSize case MemoryMode.OFF_HEAP => offHeapExecutionMemoryPool.poolSize } val size = ByteArrayMethods.nextPowerOf2(maxTungstenMemory / cores / safetyFactor) val default = math.min(maxPageSize, math.max(minPageSize, size)) conf.getSizeAsBytes("spark.buffer.pageSize", default) } /** * Allocates memory for use by Unsafe/Tungsten code. */ private[memory] final val tungstenMemoryAllocator: MemoryAllocator = { tungstenMemoryMode match { case MemoryMode.ON_HEAP => MemoryAllocator.HEAP case MemoryMode.OFF_HEAP => MemoryAllocator.UNSAFE } } }
sh-cho/cshSpark
memory/MemoryManager.scala
Scala
apache-2.0
9,476
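A worked instance of the pageSizeBytes formula above, with illustrative numbers rather than Spark defaults (4 GiB on-heap execution pool, 8 cores, no explicit spark.buffer.pageSize):

val maxTungstenMemory = 4L * 1024 * 1024 * 1024      // 4 GiB execution pool
val cores = 8
val safetyFactor = 16
val size = maxTungstenMemory / cores / safetyFactor  // 33554432 bytes = 32 MiB
// ByteArrayMethods.nextPowerOf2(32 MiB) = 32 MiB (already a power of two), and
// math.min(64 MiB, math.max(1 MiB, 32 MiB)) = 32 MiB, so pageSizeBytes = 32 MiB.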
package com.github.ldaniels528.broadway.app.config /** * Represents a processing Story */ case class Story() { }
ldaniels528/broadway
app-play/app/com/github/ldaniels528/broadway/app/config/Story.scala
Scala
apache-2.0
119
package mesosphere.marathon
package stream

import scala.reflect.ClassTag

/**
  * Extends Iterable with a few helper methods.
  */
class RichIterable[+A](to: Iterable[A]) {

  /**
    * Works like `exists` but searches for an element of a given type.
    */
  def existsAn[E](implicit tag: ClassTag[E]): Boolean = to.exists(tag.runtimeClass.isInstance)
}
mesosphere/marathon
src/main/scala/mesosphere/marathon/stream/RichIterable.scala
Scala
apache-2.0
359
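Usage; the check runs against runtime classes, so prefer reference types (boxed primitives report their java.lang wrapper classes):

val mixed: Iterable[Any] = Seq(1, "two", 3.0)

new RichIterable(mixed).existsAn[String]          // true
new RichIterable(mixed).existsAn[java.util.Date]  // false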
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.util import java.util.concurrent.atomic.AtomicInteger import org.apache.spark._ import org.apache.spark.sql.{LocalSparkSession, SparkSession} import org.apache.spark.sql.execution.QueryExecution import org.apache.spark.sql.internal.StaticSQLConf._ class ExecutionListenerManagerSuite extends SparkFunSuite with LocalSparkSession { import CountingQueryExecutionListener._ test("register query execution listeners using configuration") { val conf = new SparkConf(false) .set(QUERY_EXECUTION_LISTENERS, Seq(classOf[CountingQueryExecutionListener].getName())) spark = SparkSession.builder().master("local").appName("test").config(conf).getOrCreate() spark.sql("select 1").collect() spark.sparkContext.listenerBus.waitUntilEmpty() assert(INSTANCE_COUNT.get() === 1) assert(CALLBACK_COUNT.get() === 1) val cloned = spark.cloneSession() cloned.sql("select 1").collect() spark.sparkContext.listenerBus.waitUntilEmpty() assert(INSTANCE_COUNT.get() === 1) assert(CALLBACK_COUNT.get() === 2) } } private class CountingQueryExecutionListener extends QueryExecutionListener { import CountingQueryExecutionListener._ INSTANCE_COUNT.incrementAndGet() override def onSuccess(funcName: String, qe: QueryExecution, durationNs: Long): Unit = { CALLBACK_COUNT.incrementAndGet() } override def onFailure(funcName: String, qe: QueryExecution, error: Throwable): Unit = { CALLBACK_COUNT.incrementAndGet() } } private object CountingQueryExecutionListener { val CALLBACK_COUNT = new AtomicInteger() val INSTANCE_COUNT = new AtomicInteger() }
goldmedal/spark
sql/core/src/test/scala/org/apache/spark/sql/util/ExecutionListenerManagerSuite.scala
Scala
apache-2.0
2,446
/** * © 2014 Evan Bennett * All rights reserved. */ package com.github.evanbennett.sbt_play_messages import PlayMessagesPlugin.autoImport._ import PlayMessagesPlugin.Statuses._ import sbt._ /** * PlayMessages */ object PlayMessages { val MESSAGES_FILENAME = "messages" val APPLICATION_LANGS_CONFIGURATION_KEY = "application.langs" val CACHE_FOLDER_NAME = "com.github.evanbennett" val CACHE_FILE_NAME = "playMessagesToken" class State(val log: Logger) { if (log == null) throw new IllegalArgumentException("'log' must be provided.") var status: PlayMessagesPlugin.Status = Succeeded } case class Message(key: String, pattern: String) def listFilesRecursively(folder: java.io.File): Array[java.io.File] = { val (subFolders, files) = IO.listFiles(folder).partition(_.isDirectory) files ++ subFolders.flatMap(listFilesRecursively) } private def loadMessagesFile(messagesFile: java.io.File)(implicit state: State): (Seq[Message], java.io.File) = { play.api.i18n.PlayMessagesMessageParser.parse(messagesFile) match { case Right(messages) => state.log.debug("Loaded messages: [" + messagesFile + "] [" + messages.length + "]") (messages, messagesFile) case Left(_) => state.log.error("Error parsing the messages file: [" + messagesFile + "]") state.status = Failed (Nil, messagesFile) } } private def parseGeneratedObject(generatedObject: String)(implicit state: State): (String, String) = { state.log.debug("Parsing 'PlayMessagesKeys.generatedObject'.") val dotLastIndex = if (generatedObject == null) -1 else generatedObject.lastIndexOf('.') if (generatedObject == null || generatedObject.isEmpty || dotLastIndex < 0) { state.log.error("'generatedObject' must contain an object package and name.") state.status = Failed (null, null) } else { val _package = generatedObject.substring(0, dotLastIndex) val name = generatedObject.substring(dotLastIndex + 1) state.log.debug("_package [" + _package + "] name [" + name + "]") (_package, name) } } private[sbt_play_messages] val checkMessagesTask: Def.Initialize[Task[(PlayMessagesPlugin.Status, Seq[String])]] = Def.task { implicit val state = new State(Keys.streams.value.log) val messagesFiles: Array[File] = { state.log.debug("Loading messages files.") val confDirectory = play.PlayImport.PlayKeys.confDirectory.value val messagesFiles = IO.listFiles(confDirectory).filter(_.getName.startsWith(MESSAGES_FILENAME)).sortBy(_.getName) if (messagesFiles.nonEmpty && PlayMessagesKeys.requireDefaultMessagesFile.value && messagesFiles.head.getName != MESSAGES_FILENAME) { state.log.error("The default messages file must exist: [" + confDirectory + java.io.File.separator + MESSAGES_FILENAME + "]") state.status = Failed Array.empty } else { state.log.debug("messagesFiles: [" + messagesFiles.mkString("; ") + "]") messagesFiles } } val hasDefaultMessagesFile = (messagesFiles.nonEmpty && messagesFiles.head.getName == MESSAGES_FILENAME) // TODO: This does not appear to handle sbt options: -Dconfig.resource (relative in classpath); -Dconfig.file (absolute path); -Dconfig.url (URL); -Dapplication.langs=something val applicationLangsValue = play.api.Configuration.load(Keys.baseDirectory.value).getString(APPLICATION_LANGS_CONFIGURATION_KEY) if (messagesFiles.isEmpty && applicationLangsValue.isEmpty) (state.status, Nil) else { // Check if any changes have occurred. 
val generatedObjectFile = (Keys.sourceManaged in Compile).value / (PlayMessagesKeys.generatedObject.value.replace('.', '/') + ".scala") val newCacheValue = applicationLangsValue.getOrElse("") + ";" + messagesFiles.map(messageFile => messageFile.getName + "-" + Hash.toHex(Hash(messageFile))).mkString("[", ";", "]") val myCacheFolder = Keys.streams.value.cacheDirectory / CACHE_FOLDER_NAME if (!myCacheFolder.exists) myCacheFolder.mkdirs val myCacheFile = myCacheFolder / CACHE_FILE_NAME if (generatedObjectFile.exists && myCacheFile.exists && IO.read(myCacheFile) == newCacheValue) { state.status = NoChange state.log.info("checkMessages: NO CHANGE") } else IO.write(myCacheFile, newCacheValue) val defaultMessages: Seq[Message] = if (state.status != NoChange || PlayMessagesKeys.onNoChangeLoadDefaultMessageKeys.value) loadMessagesFile(messagesFiles.head)._1 else Nil val defaultMessageKeysDistinctSorted = defaultMessages.map(_.key).distinct.sorted if (state.status != NoChange) { if (PlayMessagesKeys.checkApplicationLanguages.value) { state.log.info("Checking 'application.langs' configuration.") applicationLangsValue match { case None => if (messagesFiles.nonEmpty) { state.log.error("The 'application.langs' configuration could not be detected, but messages files were found.") state.status = Failed } case Some(setting) => val applicationLangs = setting.split(",").map(_.trim).toBuffer (if (hasDefaultMessagesFile) messagesFiles.tail else messagesFiles).foreach { messagesFile => val fileLanguage = messagesFile.getName.substring(MESSAGES_FILENAME.length + 1) // + 1 for the '.' if (applicationLangs.contains(fileLanguage)) applicationLangs -= fileLanguage else state.log.warn("Messages file language is not listed in the 'application.langs' configuration: [" + messagesFile.getName + "]") } if (applicationLangs.isEmpty) { if (hasDefaultMessagesFile) { state.log.debug("The 'application.langs' configuration languages all match language specific messages files, and you have a default messages file.") } else { state.log.debug("The 'application.langs' configuration languages all match language specific messages files. You do not have a default messages file. 
Key consistency is required.") } } else if (applicationLangs.length == 1 && hasDefaultMessagesFile) { state.log.debug("The 'application.langs' configuration languages match the language specific messages files or the default messages file.") } else { state.log.error("The 'application.langs' configuration has languages missing messages files: [" + applicationLangs.mkString("; ") + "]") state.status = Failed } } } val languageSpecificMessages: Array[(Seq[Message], File)] = messagesFiles.tail.map(loadMessagesFile) if (PlayMessagesKeys.checkDuplicateKeys.value) { state.log.info("Checking for duplicate keys.") ((defaultMessages, messagesFiles.head) +: languageSpecificMessages).foreach { case (messages: Seq[Message], file: File) => val duplicateMessageKeys = messages.groupBy(_.key).filter(_._2.length > 1).keys if (duplicateMessageKeys.nonEmpty) state.log.warn("Messages file contains duplicate keys: [" + file + "] [" + duplicateMessageKeys.mkString("; ") + "].") else state.log.debug("Messages file contains no duplicate keys: [" + file + "]") } } if (PlayMessagesKeys.checkKeyConsistency.value) { state.log.info("Checking key consistency.") val skipFilenames = PlayMessagesKeys.checkKeyConsistencySkipFilenames.value val nonexistentFilenames = skipFilenames -- messagesFiles.map(_.getName) if (nonexistentFilenames.nonEmpty) state.log.warn("'checkKeyConsistencySkipFilenames' contains filenames that do not exist.") languageSpecificMessages.filterNot(messagesAndFile => skipFilenames.contains(messagesAndFile._2.getName)).foreach { case (messages: Seq[Message], file: File) => val currentMessageKeys = messages.map(_.key).distinct val missingKeys = defaultMessageKeysDistinctSorted.diff(currentMessageKeys) if (missingKeys.nonEmpty) { val msg = "Messages file is missing some keys: [" + file + "] [" + missingKeys.mkString("; ") + "]" if (hasDefaultMessagesFile) state.log.warn(msg) else { state.log.error(msg) state.status = Failed } } val extraKeys = currentMessageKeys.diff(defaultMessageKeysDistinctSorted) if (extraKeys.nonEmpty) state.log.warn("Messages file contains keys not in the default messages file: [" + file + "] [" + extraKeys.mkString("; ") + "]") if (missingKeys.isEmpty && extraKeys.isEmpty) state.log.debug("Messages file is ok: [" + file + "]") } } if (PlayMessagesKeys.checkKeysUsed.value) { val (_, generatedObjectName) = parseGeneratedObject(PlayMessagesKeys.generatedObject.value) if (!PlayMessagesKeys.generateObject.value) state.log.warn("Not checking key usage as object generation is disable.") else if (generatedObjectName == null) state.log.warn("Unable to check key usage due to the 'generatedObject' error above.") else { state.log.info("Checking key usage.") val ignoreFilenames = PlayMessagesKeys.checkKeysUsedIgnoreFilenames.value val ignoreKeys = PlayMessagesKeys.checkKeysUsedIgnoreKeys.value val messagesReferenceRegex = s"""[\\\\s\\\\.,(]$generatedObjectName\\\\.([a-zA-Z0-9_]+(?:\\\\.[a-zA-Z0-9_]+)*)""".r val sourceFiles = (Keys.unmanagedSourceDirectories in Compile).value.distinct.flatMap(listFilesRecursively).filterNot(file => ignoreFilenames.exists(file.getAbsolutePath.endsWith)) val referencedMessageKeys = sourceFiles.flatMap { file => messagesReferenceRegex.findAllMatchIn(IO.readLines(file).mkString).map(_.group(1)).toSeq }.distinct val remainingMessageKeys = defaultMessageKeysDistinctSorted.filterNot(messageKey => ignoreKeys.exists(messageKey.matches)).diff(referencedMessageKeys) if (remainingMessageKeys.nonEmpty) state.log.warn("Some message keys are not used: [" + 
remainingMessageKeys.mkString("; ") + "]") else state.log.debug("All messages keys that are not to be ignored are used.") } } } (state.status, defaultMessageKeysDistinctSorted) } } def deleteFile(file: File)(implicit state: State): Seq[File] = { if (file.exists) { file.delete state.log.debug("Deleted file: [" + file + "]") } Nil } def writeFile(newContent: String, file: File)(implicit state: State): Seq[File] = { val currentObjectContent: String = { if (file.exists) { state.log.debug("Loading existing file: [" + file + "]") IO.read(file) } else { state.log.debug("Creating new file: [" + file + "]") IO.createDirectory(file.getParentFile) null } } if (newContent != currentObjectContent) { IO.write(file, newContent) state.log.debug("Saved file: [" + file + "]") } Seq(file) } private[sbt_play_messages] val checkAndGenerateScalaTask: Def.Initialize[Task[(PlayMessagesPlugin.Status, Seq[File])]] = Def.task { val (status, messageKeys) = checkTask.value implicit val state = new State(Keys.streams.value.log) val (generatedObjectPackage, generatedObjectName) = parseGeneratedObject(PlayMessagesKeys.generatedObject.value) val generatedObjectFile = (Keys.sourceManaged in Compile).value / (PlayMessagesKeys.generatedObject.value.replace('.', '/') + ".scala") if (status == NoChange) { state.log.info("generateMessagesObject: NO CHANGE") (status, if (generatedObjectFile.exists) Seq(generatedObjectFile) else Nil) } else if (!PlayMessagesKeys.generateObject.value || messageKeys.isEmpty) { (status, deleteFile(generatedObjectFile)) } else { state.log.info("Generating Scala object.") val currentObjectNesting = scala.collection.mutable.ArrayBuffer.empty[String] val objectContent = "" + // Open root object and generated classes. s"""package $generatedObjectPackage | |import com.github.evanbennett.play_messages._ | |/** Play Message Keys Object generated by PlayMessagesPlugin. */ |object $generatedObjectName extends RootMessageObject { | |""".stripMargin + // Output message values. messageKeys.map { messageKey => val messageKeyParts = messageKey.split('.') var matchesSoFar = true "" + // Close any open objects that are not required. (for (((currentNesting, requiredNesting), i) <- currentObjectNesting.zipAll(messageKeyParts.dropRight(1), null, null).zipWithIndex if currentNesting != null && (!matchesSoFar || currentNesting != requiredNesting)) yield { if (matchesSoFar) matchesSoFar = false currentObjectNesting -= currentNesting (" " * (i + 1)) + "}\\n" }).reverse.mkString + // Open any objects that are required and not open. (for (((currentNesting, requiredNesting), i) <- currentObjectNesting.zipAll(messageKeyParts.dropRight(1), null, null).zipWithIndex if currentNesting == null) yield { currentObjectNesting += requiredNesting (" " * (i + 1)) + s"""object $requiredNesting extends MessageKeyPart("${messageKeyParts.take(i + 1).mkString(".")}.") {""" + "\\n" }).mkString + // Output message value. (" " * (currentObjectNesting.length + 1)) + s"""val ${messageKeyParts.last} = Message("$messageKey")""" + "\\n" }.mkString + // Close any open objects. (for (i <- currentObjectNesting.length until 0 by -1) yield (" " * i) + "}\\n").mkString + // Close root object. 
"}" (status, writeFile(objectContent, generatedObjectFile)) } } private[sbt_play_messages] val checkAndGenerateJavaTask: Def.Initialize[Task[(PlayMessagesPlugin.Status, Seq[File])]] = Def.task { implicit val state = new State(Keys.streams.value.log) val (status, messageKeys) = checkTask.value val (generatedObjectPackage, generatedObjectName) = parseGeneratedObject(PlayMessagesKeys.generatedObject.value) val generatedObjectFile = (Keys.sourceManaged in Compile).value / (PlayMessagesKeys.generatedObject.value.replace('.', '/') + ".java") if (status == NoChange) { state.log.info("generateMessagesObject: NO CHANGE") (status, if (generatedObjectFile.exists) Seq(generatedObjectFile) else Nil) } else if (!PlayMessagesKeys.generateObject.value || messageKeys.isEmpty) { (status, deleteFile(generatedObjectFile)) } else { state.log.info("Generating Java object.") val currentObjectNesting = scala.collection.mutable.ArrayBuffer.empty[String] val objectContent = "" + // Open root object and generated classes. s"""package $generatedObjectPackage; | |import com.github.evanbennett.play_messages.Message; | |/* | * Play Message Keys Object generated by PlayMessagesPlugin. | */ |public final class $generatedObjectName { | | private $generatedObjectName() {} | | public static String get(String key, Object... args) { | return play.i18n.Messages.get(key, args); | } | | public static String get(play.api.i18n.Lang lang, String key, Object... args) { | return play.i18n.Messages.get(lang, key, args); | } | |""".stripMargin + // Output message values. messageKeys.map { messageKey => val messageKeyParts = messageKey.split('.') var matchesSoFar = true "" + // Close any open objects that are not required. (for (((currentNesting, requiredNesting), i) <- currentObjectNesting.zipAll(messageKeyParts.dropRight(1), null, null).zipWithIndex if currentNesting != null && (!matchesSoFar || currentNesting != requiredNesting)) yield { if (matchesSoFar) matchesSoFar = false currentObjectNesting -= currentNesting (" " * (i + 1)) + "}\\n" }).reverse.mkString + // Open any objects that are required and not open. (for (((currentNesting, requiredNesting), i) <- currentObjectNesting.zipAll(messageKeyParts.dropRight(1), null, null).zipWithIndex if currentNesting == null) yield { currentObjectNesting += requiredNesting val j = i + 1 "" + (" " * j) + s"""public static final class $requiredNesting {""" + "\\n" + (" " * j) + s""" private $requiredNesting() {}""" + "\\n" + (" " * j) + s""" public static String get(String additionalKey, Object... args) {""" + "\\n" + (" " * j) + s""" return play.i18n.Messages.get("${messageKeyParts.take(j).mkString(".")}." + additionalKey, args);""" + "\\n" + (" " * j) + s""" }""" + "\\n" + (" " * j) + s""" public static String get(play.api.i18n.Lang lang, String additionalKey, Object... args) {""" + "\\n" + (" " * j) + s""" return play.i18n.Messages.get(lang, "${messageKeyParts.take(j).mkString(".")}." + additionalKey, args);""" + "\\n" + (" " * j) + s""" }""" + "\\n" }).mkString + // Output message value. (" " * (currentObjectNesting.length + 1)) + s"""public static final Message ${messageKeyParts.last} = new Message("$messageKey");""" + "\\n" }.mkString + // Close any open objects. (for (i <- currentObjectNesting.length until 0 by -1) yield (" " * i) + "}\\n").mkString + // Close root object. "}" (status, writeFile(objectContent, generatedObjectFile)) } } }
evanbennett/play-messages
sbt-play-messages/src/main/scala/com/github/evanbennett/sbt_play_messages/PlayMessages.scala
Scala
bsd-3-clause
16,970
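For orientation, this is roughly what checkAndGenerateScalaTask emits for the default message keys error.required and title, assuming generatedObject were set to a hypothetical generated.Messages (RootMessageObject, MessageKeyPart and Message come from the runtime library referenced in the template above):

package generated

import com.github.evanbennett.play_messages._

/** Play Message Keys Object generated by PlayMessagesPlugin. */
object Messages extends RootMessageObject {

  object error extends MessageKeyPart("error.") {
    val required = Message("error.required")
  }
  val title = Message("title")
}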
/*********************************************************************** * Copyright (c) 2013-2016 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. *************************************************************************/ package org.locationtech.geomesa.jobs.mapred import org.apache.accumulo.core.client.mapred.AccumuloInputFormat import org.apache.accumulo.core.client.security.tokens.{AuthenticationToken, PasswordToken} import org.apache.accumulo.core.security.Authorizations import org.apache.hadoop.mapred.JobConf import org.apache.log4j.Level import org.locationtech.geomesa.accumulo.AccumuloVersion._ object InputFormatBaseAdapter { def setConnectorInfo(job: JobConf, user: String, token: PasswordToken) = accumuloVersion match { case V15 => setConnectorInfo15(job, user, token) case V16 => setConnectorInfo16(job, user, token) case _ => setConnectorInfo16(job, user, token) } def setConnectorInfo15(job: JobConf, user: String, token: PasswordToken) = { val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase") .getMethod("setConnectorInfo", classOf[JobConf], classOf[String], classOf[AuthenticationToken]) method.invoke(null, job, user, token) } def setConnectorInfo16(job: JobConf, user: String, token: PasswordToken) = { val method = classOf[AccumuloInputFormat] .getMethod("setConnectorInfo", classOf[JobConf], classOf[String], classOf[AuthenticationToken]) method.invoke(null, job, user, token) } def setZooKeeperInstance(job: JobConf, instance: String, zookeepers: String) = accumuloVersion match { case V15 => setZooKeeperInstance15(job, instance, zookeepers) case V16 => setZooKeeperInstance16(job, instance, zookeepers) case _ => setZooKeeperInstance16(job, instance, zookeepers) } def setZooKeeperInstance15(job: JobConf, instance: String, zookeepers: String) = { val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase") .getMethod("setZooKeeperInstance", classOf[JobConf], classOf[String], classOf[String]) method.invoke(null, job, instance, zookeepers) } def setZooKeeperInstance16(job: JobConf, instance: String, zookeepers: String) = { val method = classOf[AccumuloInputFormat] .getMethod("setZooKeeperInstance", classOf[JobConf], classOf[String], classOf[String]) method.invoke(null, job, instance, zookeepers) } def setScanAuthorizations(job: JobConf, authorizations: Authorizations): Unit = accumuloVersion match { case V15 => setScanAuthorizations15(job, authorizations) case V16 => setScanAuthorizations16(job, authorizations) case _ => setScanAuthorizations16(job, authorizations) } def setScanAuthorizations15(job: JobConf, authorizations: Authorizations): Unit = { val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase") .getMethod("setScanAuthorizations", classOf[JobConf], classOf[Authorizations], classOf[String]) method.invoke(null, job, authorizations) } def setScanAuthorizations16(job: JobConf, authorizations: Authorizations): Unit = { val method = classOf[AccumuloInputFormat] .getMethod("setScanAuthorizations", classOf[JobConf], classOf[Authorizations], classOf[String]) method.invoke(null, job, authorizations) } def setLogLevel(job: JobConf, level: Level) = accumuloVersion match { case V15 => setLogLevel15(job, level) case V16 => setLogLevel16(job, level) case _ => setLogLevel16(job, 
level) } def setLogLevel15(job: JobConf, level: Level) = { val method = Class.forName("org.apache.accumulo.core.client.mapred.InputFormatBase") .getMethod("setLogLevel", classOf[JobConf], classOf[Level]) method.invoke(null, job, level) } def setLogLevel16(job: JobConf, level: Level) = { val method = classOf[AccumuloInputFormat].getMethod("setLogLevel", classOf[JobConf], classOf[Level]) method.invoke(null, job, level) } }
mdzimmerman/geomesa
geomesa-jobs/src/main/scala/org/locationtech/geomesa/jobs/mapred/InputFormatBaseAdapter.scala
Scala
apache-2.0
4,219
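Call sites stay version-agnostic because the adapter resolves the right overload reflectively; the credentials and ZooKeeper quorum below are placeholders:

import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.apache.hadoop.mapred.JobConf

val job = new JobConf()
InputFormatBaseAdapter.setConnectorInfo(job, "root", new PasswordToken("secret"))
InputFormatBaseAdapter.setZooKeeperInstance(job, "myInstance", "zoo1:2181,zoo2:2181")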
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright © 2012 Christian Krause * * * * Christian Krause <[email protected]> * * <[email protected]> * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * This file is part of 'ClusterKit'. * * * * This project is free software: you can redistribute it and/or modify it under the terms * * of the GNU General Public License as published by the Free Software Foundation, either * * version 3 of the License, or any later version. * * * * This project is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * See the GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License along with this project. * * If not, see <http://www.gnu.org/licenses/>. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ package ckit package client package swing import scala.swing._ import scala.swing.event._ import scala.util._ import akka.actor._ object SwingClient extends SwingApplication { val system = ActorSystem("ckit") var remote: ActorSelection = _ val proxy = system.actorOf(Props[Proxy], name = "proxy") lazy val menuBar: MenuBar = { val bar = new MenuBar val monitoring = new Menu("Monitoring") monitoring.contents += new MenuItem(action.JobDetail) monitoring.contents += new MenuItem(action.JobList) monitoring.contents += new MenuItem(action.JobListFor) monitoring.contents += new MenuItem(action.QueueSummary) monitoring.contents += new MenuItem(action.RuntimeSchedule) val main = new Menu("Main") main.contents += monitoring main.contents += new Separator main.contents += new MenuItem(action.Quit) val help = new Menu("Help") help.contents += new MenuItem(action.Help) help.contents += new MenuItem(action.Mail) help.contents += new Separator help.contents += new MenuItem(action.About) bar.contents += main bar.contents += help bar } lazy val top = new MainFrame { override def closeOperation() { SwingClient.quit() } } def Connector = { val field = new TextField("host.cluster.example.org") field.selectAll() field.listenTo(field.keys) field.reactions += { case event @ KeyPressed(`field`, Key.Enter, _, _) ⇒ val host = field.text Try(java.net.InetAddress.getByName(host).getHostAddress) match { case Success(address) ⇒ remote = system.actorSelection(s"""akka.tcp://ckit@$address:2552/user/grid-engine-actor""") action.JobList() case Failure(reason) ⇒ Console.err.println(reason) } } val panel = new FlowPanel panel.contents += field panel } lazy val view = new BorderPanel { private var current: Component = _ private var previous: Component = _ def contents_=(c: Component): Unit = if (current != c) { layout(c) = BorderPanel.Position.Center previous = current current = c current.requestFocus() revalidate() } def back(): Unit = { contents = previous } } def startup(args: Array[String]) { top.title = "ClusterKit" top.menuBar = menuBar view.contents = Connector view.peer.add(StatusBar, java.awt.BorderLayout.SOUTH) top.contents = view top.pack() top.visible = true } override def quit(): Unit = { system.shutdown() sys.exit(0) } }
wookietreiber/ckit
client/swing/main/scala/SwingClient.scala
Scala
gpl-3.0
4,879
/* * Copyright (c) 2017 Lucas Satabin * * Licensed under the Apache License Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package toolxit package font package tfm import dimen._ import scodec._ import bits._ import codecs._ import shapeless._ import java.nio.charset.Charset /** Codec for `.tfm` files. * Based on http://texdoc.net/texmf-dist/doc/generic/knuth/texware/tftopl.pdf */ object TfmCodec { private val fixWord: Codec[Double] = int32.xmap({ i => i * scala.math.pow(2, -20) }, { d => scala.math.round(d * scala.math.pow(2, 20)).toInt }) private val uint6 = uint(6) private val charInfoWord: Codec[CharInfoWord] = (uint8 :: uint4 :: uint4 :: uint6 :: uint2.xmap[Tag](i => Tag.withValue(i.toByte), _.value) :: uint8).as[CharInfoWord] private val header = ("lf" | uint16) :: ("lh" | uint16) :: ("bc" | uint16) :: ("ec" | uint16) :: ("nw" | uint16) :: ("nh" | uint16) :: ("nd" | uint16) :: ("ni" | uint16) :: ("nl" | uint16) :: ("nk" | uint16) :: ("nc" | uint16) :: ("np" | uint16) private def bcpl(name: String, size: Int) = for { sz <- uint8.emap(sz => if (sz < size) Attempt.successful(sz) else Attempt.failure(Err(f"$name size must be less than $size bytes"))) scheme <- fixedSizeBytes(sz, string(Charset.forName("US-ASCII"))) // ignore the rest _ <- ignore((size - sz - 1) * 8) } yield Some(scheme) private val codingScheme = bcpl("encoding scheme", 40) private val fontIdentifier = bcpl("font identifier", 20) private def formatFace(k: Byte): String = { val s = k % 2 val b = k / 2 val mbl = "MBL"(b % 3) val ri = "RI"(s) val rce = "RCE"(b / 3) f"$mbl$ri$rce" } private val face = for { _ <- ignore(24) b <- byte } yield Some(if (b >= 18 || b < 0) Right(b) else Left(formatFace(b))) /** Decodes an entire `.tfm` file. */ val file: Decoder[TfmFontMetrics] = header.flatMap { case lf :: lh :: bc :: ec :: nw :: nh :: nd :: ni :: nl :: nk :: nc :: np :: HNil => for { checkSum <- int32 size <- fixWord codingScheme <- if (lh >= 12) codingScheme.decodeOnly else provide(None) fontIdentifier <- if (lh >= 17) fontIdentifier.decodeOnly else provide(None) face <- if (lh >= 18) face.decodeOnly else provide(None) // ignore the rest of the header _ <- if (lh >= 19) ignore((lh - 18) * 8) else provide(()) charInfo <- fixedSizeBits((ec - bc + 1) * 32, vector(charInfoWord)) width <- fixedSizeBits(nw * 32, vector(fixWord)) height <- fixedSizeBits(nh * 32, vector(fixWord)) depth <- fixedSizeBits(nd * 32, vector(fixWord)) italic <- fixedSizeBits(ni * 32, vector(fixWord)) ligKern <- fixedSizeBits(nl * 32, vector(int32)) kern <- fixedSizeBits(nk * 32, vector(fixWord)) exten <- fixedSizeBits(nc * 32, vector(int32)) param <- fixedSizeBits(np * 32, vector(fixWord)) } yield TfmFontMetrics(checkSum, size.pt, codingScheme, fontIdentifier, face, bc, charInfo, width, height, depth, italic, ligKern, kern, exten, param) } }
satabin/toolxit-ng
fonts/src/main/scala/toolxit/font/tfm/TfmCodec.scala
Scala
apache-2.0
3,651
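A worked fix_word example matching the codec above: the stored 32-bit integer is scaled by 2^-20 on decode and rounded back on encode:

val raw = 0x00180000                                      // 1572864
val decoded = raw * math.pow(2, -20)                      // 1.5
val reencoded = math.round(1.5 * math.pow(2, 20)).toInt   // 1572864 again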
package com.blogspot.ramannanda.scala.algorithms.cp3.adhoc.rl import scala.io.StdIn object SuperBowlSunday { def printNumbersIfPossible(sum: Int, diff: Int): Unit = { if ((sum + diff) % 2 == 0 && (sum - diff) % 2 == 0) { val a = (sum + diff) / 2 val b = (sum - diff) / 2 if (a > 0 && b > 0) { println(s"$a $b") return } } println("impossible") } def main(args: Array[String]): Unit = { val cases = StdIn.readLine().toInt for (i <- 0 until cases) { val numbers = StdIn.readLine().split("\\\\s+").map(_.toInt) printNumbersIfPossible(numbers(0), numbers(1)) } } }
ramannanda9/algorithms-in-scala
src/main/scala/com/blogspot/ramannanda/scala/algorithms/cp3/adhoc/rl/SuperBowlSunday.scala
Scala
gpl-3.0
647
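A worked example of the reconstruction above: from sum = a + b and diff = a - b it recovers a = (sum + diff) / 2 and b = (sum - diff) / 2, valid only when both halves are positive integers:

SuperBowlSunday.printNumbersIfPossible(40, 10) // prints "25 15"
SuperBowlSunday.printNumbersIfPossible(5, 2)   // prints "impossible": 5 + 2 is odd
SuperBowlSunday.printNumbersIfPossible(4, 6)   // prints "impossible": b would be -1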
package io.continuum.bokeh object Json extends Json trait Json extends upickle.AttributeTagged with JsonSyntax { implicit def JsWriter[T <: Js.Value] = Writer[T] { case value => value } override implicit def OptionW[T:Writer] = Writer[Option[T]] { case Some(value) => writeJs(value) case None => Js.Null } override implicit def SomeW[T:Writer] = Writer[Some[T]](OptionW[T].write) override implicit val NoneW: Writer[None.type] = Writer[None.type](OptionW[Int].write) case class Stringable[T](str: T => String) implicit val StringStringable = Stringable[String](identity) implicit val SymbolStringable = Stringable[Symbol](_.name) implicit def EnumStringable[T <: EnumType] = Stringable[T](_.name) implicit def MapW[K:Stringable:Writer, V:Writer]: Writer[Map[K, V]] = Writer[Map[K, V]] { case obj => Js.Obj(obj.toSeq.map { case (k, v) => (implicitly[Stringable[K]].str(k), writeJs(v)) }.sortBy(_._1): _*) } implicit def EnumWriter[T <: EnumType] = Writer[T] { case value => writeJs(value.name) } implicit val PercentWriter = Writer[Percent] { case percent => writeJs(percent.value) } implicit val ColorWriter = Writer[Color] { case color => writeJs(color.toCSS) } implicit val FontSizeWriter = Writer[FontSize] { case size => writeJs(size.toCSS) } implicit val TooltipWriter = Writer[Tooltip] { case StringTooltip(string) => writeJs(string) case HTMLTooltip(html) => writeJs(html.toString) case TabularTooltip(rows) => writeJs(rows) } implicit val OrientationWrites = Writer[Orientation] { case Orientation.Angle(value) => writeJs(value) case value => EnumWriter.write(value) } implicit def HasFieldsWriter[T <: HasFields] = Writer[T] { case (obj: Model) => writeJs(obj.getRef) case obj => obj.fieldsToJson(false) } } trait JsonSyntax { self: Json => // Inspired by play-json's "Simplified JSON Syntax" case class JsWrapper(value: Js.Value) implicit def wrapJsValue[T:Writer](value: T): JsWrapper = JsWrapper(writeJs(value)) def obj(pairs: (String, JsWrapper)*): Js.Obj = { Js.Obj(pairs.map { case (k, v) => k -> v.value }: _*) } def arr(items: JsWrapper*): Js.Arr = { Js.Arr(items.map(_.value): _*) } }
bokeh/bokeh-scala
bokeh/src/main/scala/Json.scala
Scala
mit
2,467
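The JsWrapper syntax gives a play-json-style builder; with upickle's default writers in scope this should compose as below (the field names are arbitrary):

import io.continuum.bokeh.Json._

val js = obj(
  "title" -> "scatter",
  "width" -> 400,
  "tags"  -> arr("demo", "draft")
)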
package ch.wsl.box.client.forms import ch.wsl.box.client.utils.TestHooks import ch.wsl.box.client.{Context, EntityFormState, Main, TestBase} import ch.wsl.box.model.shared.{EntityKind, JSONID, JSONKeyValue} import org.scalajs.dom.document import org.scalajs.dom.raw.HTMLElement import scala.concurrent.Future class ReadOnlyTest extends TestBase { "read only field" should "not be editable" in { for { _ <- Main.setupUI() _ <- Context.services.clientSession.login("test", "test") _ <- waitLoggedIn _ <- Future { Context.applicationInstance.goTo(EntityFormState(EntityKind.FORM.kind, values.testFormName, "true", Some(JSONID(Vector(JSONKeyValue("id", "1"))).asString),false)) } _ <- waitElement({() => logger.info(s"Looking for .${TestHooks.readOnlyField(values.readOnlyField)}") val result = document.getElementsByClassName(TestHooks.readOnlyField(values.readOnlyField)).item(0) if(result == null) logger.info("null") else logger.info(result.outerHTML) result },"Read only field") _ <- Future { assert(document.getElementsByClassName(TestHooks.readOnlyField(values.readOnlyField)).length == 1) assert(document.getElementsByClassName(TestHooks.readOnlyField(values.readOnlyField)).item(0).isInstanceOf[HTMLElement]) val field = document.getElementsByClassName(TestHooks.readOnlyField(values.readOnlyField)).item(0).asInstanceOf[HTMLElement] assert(field.innerHTML == values.readOnlyValue) } } yield succeed } }
Insubric/box
client/src/test/scala/ch/wsl/box/client/forms/ReadOnlyTest.scala
Scala
apache-2.0
1,598
package org.openapitools.client.api import argonaut._ import argonaut.EncodeJson._ import argonaut.DecodeJson._ import org.http4s.{EntityDecoder, EntityEncoder} import org.http4s.argonaut._ import org.joda.time.DateTime import InputStepImpllinks._ case class InputStepImpllinks ( self: Option[Link], `class`: Option[String]) object InputStepImpllinks { import DateTimeCodecs._ implicit val InputStepImpllinksCodecJson: CodecJson[InputStepImpllinks] = CodecJson.derive[InputStepImpllinks] implicit val InputStepImpllinksDecoder: EntityDecoder[InputStepImpllinks] = jsonOf[InputStepImpllinks] implicit val InputStepImpllinksEncoder: EntityEncoder[InputStepImpllinks] = jsonEncoderOf[InputStepImpllinks] }
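// --- Hedged usage sketch (added for illustration; not part of the original file). ---
// The derived CodecJson above also supports plain string round-trips through
// Argonaut's Parse syntax; the JSON payload below is made up, and the codec for
// Link is assumed to come from elsewhere in the generated client.
object InputStepImpllinksExample {
  import argonaut._, Argonaut._

  val payload = """{"self":null,"class":"some.generated.Class"}"""
  // Yields Some(InputStepImpllinks(None, Some("some.generated.Class")))
  val decoded: Option[InputStepImpllinks] = payload.decodeOption[InputStepImpllinks]
}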
cliffano/swaggy-jenkins
clients/scalaz/generated/src/main/scala/org/openapitools/client/api/InputStepImpllinks.scala
Scala
mit
720
package blended.prickle.akka.http import scala.util.Success import akka.http.scaladsl.model.HttpEntity import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.testkit.ScalatestRouteTest import blended.util.logging.Logger import microjson.JsValue import org.scalatest.FreeSpec import prickle.JsConfig import prickle.PConfig import prickle.Pickle import prickle.Pickler import prickle.Unpickle class PrickleSupportSpec extends FreeSpec with ScalatestRouteTest with PrickleSupport { private[this] val log = Logger[PrickleSupportSpec] case class Foo(bar: String, baz: Long) implicit val prickleConfig: PConfig[JsValue] = JsConfig(areSharedObjectsSupported = false) implicit val fooPickler: Pickler[Foo] = Pickler.materializePickler[Foo] val testRoute = get { complete(Foo("Hi", 42L)) } ~ post { entity(as[Foo]) { case Foo(bar, baz) => complete(s"Got a foo bar ${bar} with baz ${baz}") } } "pickling" in { val foo = Foo("Foo", 0L) assert(Unpickle[Foo].fromString(Pickle.intoString(foo)) === Success(foo)) } "marshal" in { log.info("About to GET") Get() ~> testRoute ~> check { assert(contentType === PrickleSupport.prickleMediaType.toContentType) // log.info(s"got: ${responseAs[String]}") assert(responseAs[Foo] === Foo("Hi", 42L)) // assert(entityAs[String] === Pickle.intoString(Foo("Hi", 42L))) } } "unmarshal" in { log.info(s"About to POST ${Foo("Hello", 1L)}") Post("/", HttpEntity(prickleMediaType.toContentType, Pickle.intoString(Foo("Hello", 1L)))) ~> testRoute ~> check { log.info(s"Got: ${responseAs[String]}") assert(responseAs[String] === "Got a foo bar Hello with baz 1") } } }
lefou/blended
blended.prickle.akka.http/src/test/scala/blended/prickle/akka/http/PrickleSupportSpec.scala
Scala
apache-2.0
1,736
/*
 * Copyright (c) 2018. Yuriy Stul
 */
package com.stulsoft.queues

/**
  * @author Yuriy Stul
  */
case class SomeObject(id: Int, text: String)
ysden123/poc
queues/src/main/scala/com/stulsoft/queues/SomeObject.scala
Scala
mit
145
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations.Validators import org.mockito.Mockito.when import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{Matchers, WordSpec} import uk.gov.hmrc.ct.CATO13 import uk.gov.hmrc.ct.box.{CtValidation, ValidatableBox} import uk.gov.hmrc.ct.computations._ import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever trait DonationsValidationFixture extends WordSpec with Matchers with MockitoSugar { def testGlobalDonationsValidationErrors(box: ValidatableBox[ComputationsBoxRetriever])(boxRetriever: ComputationsBoxRetriever): Unit = { when(boxRetriever.cp301()).thenReturn(CP301(1)) when(boxRetriever.cp302()).thenReturn(CP302(1)) "fail if total donations in p&l is undefined and the sum of donations is greater than 0" in { when(boxRetriever.cp29()).thenReturn(CP29(None)) when(boxRetriever.cato13()).thenReturn(CATO13(10)) when(boxRetriever.cp999()).thenReturn(CP999(1)) when(boxRetriever.cp303()).thenReturn(CP303(0)) when(boxRetriever.cp3030()).thenReturn(CP3030(0)) box.validate(boxRetriever) shouldBe Set(CtValidation(None, "error.sum.of.donations.exceeds.total")) when(boxRetriever.cp999()).thenReturn(CP999(0)) when(boxRetriever.cp303()).thenReturn(CP303(1)) when(boxRetriever.cp3030()).thenReturn(CP3030(0)) box.validate(boxRetriever) shouldBe Set(CtValidation(None, "error.sum.of.donations.exceeds.total")) when(boxRetriever.cp999()).thenReturn(CP999(0)) when(boxRetriever.cp303()).thenReturn(CP303(0)) when(boxRetriever.cp3030()).thenReturn(CP3030(1)) box.validate(boxRetriever) shouldBe Set(CtValidation(None, "error.sum.of.donations.exceeds.total")) } "validate if total donations in p&l is undefined the sum of donations is 0" in { when(boxRetriever.cp29()).thenReturn(CP29(None)) when(boxRetriever.cp999()).thenReturn(CP999(0)) when(boxRetriever.cp303()).thenReturn(CP303(0)) when(boxRetriever.cp3030()).thenReturn(CP3030(0)) box.validate(boxRetriever) shouldBe Set.empty } "fail if total donations is defined and the sum of donations is greater" in { when(boxRetriever.cp29()).thenReturn(CP29(Some(3))) when(boxRetriever.cp999()).thenReturn(CP999(1)) when(boxRetriever.cp303()).thenReturn(CP303(2)) when(boxRetriever.cp3030()).thenReturn(CP3030(1)) box.validate(boxRetriever) shouldBe Set(CtValidation(None, "error.sum.of.donations.exceeds.total")) } "validate if total donations is defined and the sum of donations is less than or equal" in { when(boxRetriever.cp29()).thenReturn(CP29(Some(3))) when(boxRetriever.cp999()).thenReturn(CP999(1)) when(boxRetriever.cp303()).thenReturn(CP303(1)) when(boxRetriever.cp3030()).thenReturn(CP3030(1)) box.validate(boxRetriever) shouldBe Set.empty when(boxRetriever.cp999()).thenReturn(CP999(1)) when(boxRetriever.cp303()).thenReturn(CP303(1)) when(boxRetriever.cp3030()).thenReturn(CP3030(None)) box.validate(boxRetriever) shouldBe Set.empty } "validate if total donations is less than or equal to Net Profit" in { when(boxRetriever.cp999()).thenReturn(CP999(1)) 
when(boxRetriever.cato13()).thenReturn(CATO13(1)) box.validate(boxRetriever) shouldBe Set.empty when(boxRetriever.cp999()).thenReturn(CP999(1)) when(boxRetriever.cato13()).thenReturn(CATO13(2)) box.validate(boxRetriever) shouldBe Set.empty } "fail if total donations is greater than Net Profit" in { when(boxRetriever.cp999()).thenReturn(CP999(2)) when(boxRetriever.cato13()).thenReturn(CATO13(1)) box.validate(boxRetriever) shouldBe Set(CtValidation(None, "error.qualifying.donations.exceeds.net.profit")) } } }
hmrc/ct-calculations
src/test/scala/uk/gov/hmrc/ct/computations/Validators/DonationsValidationFixture.scala
Scala
apache-2.0
4,424
package tmvault.io import tmvault.{Future, ExecutionContext} import tmvault.util.SHA1Hash object ObjectStore { def apply[T](blockStore: BlockStore, serializer: BlobSerializer[T])(implicit ec: ExecutionContext): ObjectStore[T] = SimpleObjectStore(blockStore, serializer) private case class SimpleObjectStore[T](blockStore: BlockStore, serializer: BlobSerializer[T])(implicit ec: ExecutionContext) extends ObjectStore[T] { override def get(key: SHA1Hash): Future[T] = blockStore.getBytes(key).map(bytes => serializer.read(BlobIterator(bytes))) override def put(value: T): Future[SHA1Hash] = { val size = serializer.size(value) val builder = BlobBuilder(size) serializer.write(value, builder) blockStore.putBytes(builder.result) } } } trait ObjectStore[T] { def put(value:T) : Future[SHA1Hash] def get(key:SHA1Hash) : Future[T] }
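// --- Hedged usage sketch (added for illustration; not part of the original file). ---
// Wires a serializer to a block store and round-trips one value. The concrete
// BlockStore and BlobSerializer[String] instances are assumed to be provided
// elsewhere by the caller.
object ObjectStoreExample {
  def roundTrip(blocks: BlockStore, strings: BlobSerializer[String])
               (implicit ec: ExecutionContext): Future[String] = {
    val store = ObjectStore(blocks, strings)
    for {
      hash  <- store.put("hello") // serializes and stores, yielding a SHA1 key
      value <- store.get(hash)    // fetches and deserializes the same bytes
    } yield value
  }
}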
rklaehn/tmvault
tmvault/src/main/scala/tmvault/io/ObjectStore.scala
Scala
apache-2.0
891
package test import cucumber.api.scala.{ScalaDsl, EN} import org.scalatest.matchers.ShouldMatchers class CucumberJarStepDefinitions extends ScalaDsl with EN with ShouldMatchers { private var givenCalled = false private var whenCalled = false Given("""^an SBT project$""") { () => givenCalled = true } When("""^the cucumber task is called$""") { () => whenCalled = true } Then("""^Cucumber is executed against the features and step definitions$""") { () => givenCalled should be (true) whenCalled should be (true) System.getProperty("testing") should be ("true") System.getProperty("demo") should be ("yes") } }
gsood/xsbt-cucumber-plugin
testProjects/testProject_2.9/src/test/scala/test/CucumberJarStepDefinitions.scala
Scala
apache-2.0
657
package relational.analyzers import relational._ import relational.attributes.AttributeLike import relational.comparissions.{Comparission, None => NullComp, And, Equality} import relational.joins.Join class QueryAnalyzer(selector: Selector) { lazy val fields = { val grouped = selector.select.grouped(1).map(_.toSet).toSet val cond = allConditions extractConditions(cond, grouped) } private def extractConditions(comparission: Comparission, current: Set[Set[AttributeLike]]): Set[Set[AttributeLike]] = comparission match { case Equality(Equality.Equals, one, two) => val subtracted = current.filterNot { s => s.contains(one) || s.contains(two) } val combined = (current -- subtracted).flatten subtracted + combined case And(list) => list.foldLeft(current) { (sets, e) => extractConditions(e, sets) } case _ => current } // This brings all conditions that restrict this query, like conditions in joins, in where, and having. def allConditions: Comparission = { val seed = selector.where && selector.having selector.join.foldLeft(seed) { case(comp, Join(_, c, 'inner)) => comp && c case (comp, _) => comp } } }
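// Worked example (added comments; not part of the original file):
// with select attributes a, b, c the initial grouping is {{a}, {b}, {c}}.
// Folding the condition (a = b AND b = c) through extractConditions merges
// the groups pairwise:
//   a = b  =>  {{a, b}, {c}}
//   b = c  =>  {{a, b, c}}
// so equalities induce equivalence classes over the selected attributes,
// while any other comparison leaves the grouping untouched (the catch-all case).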
mauricioszabo/relational-scala
src/main/scala/relational/analyzers/QueryAnalyzer.scala
Scala
artistic-2.0
1,216
package com.sksamuel.elastic4s

import org.scalatest.FlatSpec
import org.scalatest.mock.MockitoSugar
import ElasticDsl._

/** @author Stephen Samuel */
class CountDslTest extends FlatSpec with MockitoSugar with ElasticSugar {

  "a count request" should "accept tuple for from" in {
    val req = count from "places" -> "cities" where "name" -> "sammy"
    assert(req.build.indices() === Array("places"))
    assert(req.build.types() === Array("cities"))
  }
  it should "accept indextype" in {
    val req = count from "places" / "cities" where "name" -> "sammy"
    assert(req.build.indices() === Array("places"))
    assert(req.build.types() === Array("cities"))
  }
  it should "accept sequence of indexes and types" in {
    val req = count from Seq("index1", "index2") types Seq("type1", "type2") where "name" -> "sammy"
    assert(req.build.indices() === Array("index1", "index2"))
  }
  it should "accept sequence of indexes and single type" in {
    val req = count from Seq("index1", "index2") types "type1" where "name" -> "sammy"
    assert(req.build.indices() === Array("index1", "index2"))
  }
  it should "accept single index and single type" in {
    val req = count from "places" types "cities" where "paris"
    assert(req.build.indices() === Array("places"))
  }
  it should "accept single index and sequence of types" in {
    val req = count from "places" types Seq("type1", "type2") query "paris"
    assert(req.build.indices() === Array("places"))
  }
  it should "accept varargs index and varargs of types" in {
    val req = count from ("places", "bands") types ("type1", "type2") where "paris"
    assert(req.build.indices() === Array("places", "bands"))
  }
  it should "accept single index and varargs of types" in {
    val req = count from "places" types ("type1", "type2") where "paris"
    assert(req.build.indices() === Array("places"))
  }
  it should "parse slash indextype" in {
    val req = count from "places/cities" query "paris"
    assert(req.build.indices() === Array("places"))
  }
  it should "parse method invocation as index type" in {
    val req = count("places/cities") query "paris"
    assert(req.build.indices() === Array("places"))
  }
  it should "accept vararg method invocation as indexes" in {
    val req = count("places", "bands") query "paris"
    assert(req.build.indices() === Array("places", "bands"))
  }
  it should "accept tuple method invocation" in {
    val req = count("places" -> "bands") query "paris"
    assert(req.build.indices() === Array("places"))
  }
}
l15k4/elastic4s
elastic4s-core/src/test/scala/com/sksamuel/elastic4s/CountDslTest.scala
Scala
apache-2.0
2,549
///* // active-learning-scala: Active Learning library for Scala // Copyright (c) 2014 Davi Pereira dos Santos // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. // */ // //package al.strategies // //import ml.Pattern //import ml.classifiers.Learner //import ml.models.Model //import util.XSRandom // //case class GATU3b(learner: Learner, pool: Seq[Pattern], distance_name: String, alpha: Double = 1, beta: Double = 1, debug: Boolean = false) // extends StrategyWithLearnerAndMaps with MarginMeasure with EntropyMeasure { // override val toString = "GATU3b a" + alpha + " b" + beta + " (" + distance_name + ")" // val abr = "\\textbf{GATU3b" + distance_name.take(3) + "}" // //+ beta // val id = if (alpha == 1 && beta == 1 || alpha == 0.5 && beta == 0.5) distance_name match { // case "eucl" => 8533361 + (100000 * (1 - alpha)).toInt // case "cheb" => 8533381 + (100000 * (1 - alpha)).toInt // case "maha" => 8533391 + (100000 * (1 - alpha)).toInt // case "manh" => 8533371 + (100000 * (1 - alpha)).toInt // } else throw new Error("Parametros inesperados para GATU3b.") // // protected def next(mapU: => Map[Pattern, Double], mapL: => Map[Pattern, Double], current_model: Model, unlabeled: Seq[Pattern], labeled: Seq[Pattern]) = { // val agnostico = labeled.size % 2 == 0 // // val selected = unlabeled maxBy { x => // val similarityU = mapU(x) / mapU.size.toDouble // val similarityL = mapL(x) / mapL.size.toDouble // if (agnostico) // math.pow(similarityU, beta) / math.pow(similarityL, alpha) // else // 1 - margin(current_model)(x) // } // selected // } //} //
active-learning/active-learning-scala
src/main/scala/al/strategies/GATU3b.scala
Scala
gpl-2.0
2,294
class BogoSort { def isSorted(l:List[Int]): Boolean = l match { case Nil => true case x :: Nil => true case x :: xs => x <= xs.head && isSorted(xs) } def bogoSortMethod(listForShuffle: List[Int]):List[Int] = { val shuffledList = util.Random.shuffle(listForShuffle) if(isSorted(shuffledList)) shuffledList else bogoSortMethod(shuffledList) } }
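// Hedged usage sketch (added for illustration; not part of the original file).
// Bogosort reshuffles until the list happens to be sorted, so the expected cost
// is on the order of n * n! shuffles; keep the input tiny.
object BogoSortExample extends App {
  val sorted = new BogoSort().bogoSortMethod(List(3, 1, 2))
  println(sorted) // List(1, 2, 3)
}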
warreee/Algorithm-Implementations
Bogosort/Scala/aayushKumarJarvis/BogoSort.scala
Scala
mit
388
package no.nr.edvard.osiris.analysis import collection.immutable import collection.mutable import immutable.Set import no.nr.edvard.osiris.model.{MethodIdentifier, JavaMethod, JavaType} // TODO! Should rename this to what it is, and extract a general interface ;-) // Plus optimize it! object CalleeMapBuilder { private type CalleeMap = Map[MethodIdentifier, Set[JavaMethod]] private case class NameDescTuple(name: String, desc: String) def buildCalleeMap(types: Set[JavaType]): CalleeMap = { require(types != null) val inheritanceMap = buildInheritanceMap(types) val tempRes = mutable.Map[MethodIdentifier, immutable.Set[JavaMethod]]() def updateTempRes(id: MethodIdentifier, delta: Set[JavaMethod]) = tempRes.get(id) match { case Some(existingCallees) => tempRes.put(id, existingCallees ++ delta) case None => tempRes.put(id, delta) } def propagateMethod( _type: JavaType, nameDesc: NameDescTuple, superImpl: Option[JavaMethod] = None ): Set[JavaMethod] = { val typeImpl = _type.methods.find(m => m.name == nameDesc.name && m.desc == nameDesc.desc) val dominantImpl = typeImpl match { case Some(m) => typeImpl case None => superImpl } val subTypeImplementations = inheritanceMap(_type).flatMap( propagateMethod(_, nameDesc, dominantImpl)) val possibleCallees = dominantImpl match { case Some(m) if m.isConcrete => subTypeImplementations + m case _ => subTypeImplementations } updateTempRes( MethodIdentifier(_type.name.internal, nameDesc.name, nameDesc.desc), possibleCallees ) possibleCallees } def saveNonVirtual(_type: JavaType, method: JavaMethod) = updateTempRes( method.identifier, Set(method) ) def dig(_type: JavaType, closed: Set[NameDescTuple] = Set()) { val (nonVirtuals, virtuals) = _type.methods.filter(!_.isSpecial) .partition(m => m.isPrivate || m.isStatic) nonVirtuals.foreach(saveNonVirtual(_type, _)) val closedDelta = for { m <- virtuals desc = NameDescTuple(m.name, m.desc) if !closed.contains(desc) } yield { propagateMethod(_type, desc) desc } for (subType <- inheritanceMap(_type)) dig(subType, closed ++ closedDelta) } for (t <- types if t.isRootType && t.interfaceInternalNames.isEmpty) dig(t) tempRes.toMap } private def buildInheritanceMap(types: Set[JavaType]) = { val accum = types.map(_.name.internal -> mutable.ArrayBuffer[JavaType]()).toMap def registerInheritance(_type: JavaType) { def registerIfFound(fromTypeName: String) = accum.get(fromTypeName) match { case Some(subtypes) => subtypes += _type case None => } _type.superClassInternalName match { case Some(name) => registerIfFound(name) case None => } _type.interfaceInternalNames.foreach(registerIfFound) } types.foreach(registerInheritance) val typeMap = types.map { t => (t.name.internal -> t) }.toMap accum.map { case (k, v) => (typeMap(k) -> v.toSet) }.toMap } }
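// Worked example (added comments; not part of the original file):
// given `class A { def m() {} }` and `class B extends A { override def m() {} }`,
// propagateMethod walks the inheritance map from the root types and records,
// for each declaring type, every concrete implementation a virtual call
// could reach:
//   A.m -> {A.m, B.m}   // dispatch through static type A may land in B
//   B.m -> {B.m}
// Private and static methods are partitioned out first as non-virtual and
// map only to themselves (saveNonVirtual).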
edwkar/edwbsc
projects/Osiris/src/main/scala/no/nr/edvard/osiris/analysis/CalleeMapBuilder.scala
Scala
gpl-2.0
3,301
package scalan.primitives import scalan._ import scalan.common._ import scala.reflect.runtime.universe._ trait StructKeys extends ViewsDsl with Entities { self: StructsDsl with Scalan => type SKey[S <: Struct] = Rep[StructKey[S]] trait StructKey[Schema <: Struct] extends Def[StructKey[Schema]] { def eSchema: Elem[Schema] def index: Rep[Int] def name: Rep[String] } abstract class IndexStructKey[Schema <: Struct] (val index: Rep[Int]) (implicit val eSchema: Elem[Schema]) extends StructKey[Schema] { def name: Rep[String] = { val i = index.asValue eSchema.fieldNames(i) } override def toString = s"${eSchema.fieldsString}[$index]" } abstract class NameStructKey[Schema <: Struct] (val name: Rep[String]) (implicit val eSchema: Elem[Schema]) extends StructKey[Schema] { def index: Rep[Int] = { val n = name.asValue eSchema.findFieldIndex(n) } override def toString = s"${eSchema.fieldsString}.$name" } } trait StructKeysDsl extends impl.StructKeysAbs {self: StructsDsl with Scalan => type KSet = Rep[KeySet] trait KeySet { def keys: Seq[String] } class KeySetCompanion { def apply(names: Seq[String]) = keyset_create(names) } val KeySet: KeySetCompanion = new KeySetCompanion case class KeySetSeq(keys: Seq[String]) extends KeySet implicit class KeySetOps(ks: Rep[KeySet]) { // def apply(i: Rep[Int]) = keyset_getAt(ks, i) } class KeySetElem extends BaseElem[KeySet]()(weakTypeTag[KeySet], Default.defaultVal(KeySetSeq(Seq()))) implicit val KeySetElement: Elem[KeySet] = new KeySetElem def keyset_create(keys: Seq[String]): Rep[KeySet] // def keyset_getAt(ks: KSet, i: Rep[Int]): Rep[StructKey] } trait StructKeysDslStd extends impl.StructKeysStd {self: StructsDsl with ScalanStd => def keyset_create(keys: Seq[String]): Rep[KeySet] = KeySetSeq(keys) } trait StructKeysDslExp extends impl.StructKeysExp {self: StructsDsl with ScalanExp => def keyset_create(keys: Seq[String]): Rep[KeySet] = KeySetDef(keys) case class KeySetDef(keys: Seq[String]) extends BaseDef[KeySet] { override def toString = s"KeySet(${keys.mkString(",")})" } }
PCMNN/scalan-ce
core/src/main/scala/scalan/primitives/StructKeys.scala
Scala
apache-2.0
2,201
import language.experimental.macros
import scala.reflect.macros._
import blackbox.Context

object X {
  def classTagOrNull[T](implicit t: reflect.ClassTag[T] = null) = t

  // the failed search for ClassTag[T] does not issue a visible
  // error as we fall back to the default argument. But, the
  // macro engine thinks we have expanded the macro `materializeClassTag[D]()`
  // to `EmptyTree`, and then attaches a backreference from the expansion
  // to the expandee. This is the `MacroExpansionAttachment` tree attachment.
  def foo[D] = classTagOrNull[D]

  def extractor: Any = macro X.extractorMacro

  def extractorMacro(c: Context): c.Expr[Any] = {
    // Later, in reify, an unrelated use of `EmptyTree` in the AST representing
    // the argument is now treated as a macro expansion which should be rolled
    // back in the tree we reify! This ends up generating a call to `implicitly`
    // which leads to an ambiguous error.
    //
    // Any macro call that expands to EmptyTree could have triggered this problem.
    c.universe.reify(new { def something(data: Any) = ??? })
  }

  // Workarounds:
  //
  // 1. Use quasiquotes rather than `reify`. (But, beware to fully qualify all references, e.g. `_root_.scala.Predef.???`)
  // 2. Avoid failed ClassTag lookups (e.g. in the original bug report, annotate the type argument to `map`)
  // 3. In the macro implementation, just before calling the `reify` macro, you could call another macro
  //
  //    def prepareReify = macro prepareReifyImpl
  //    def prepareReifyImpl(c: Context) = {
  //      val symtab = c.universe.asInstanceOf[reflect.internal.SymbolTable]
  //      symtab.EmptyTree.setAttachments(symtab.NoPosition)
  //    }
  //
  //    To make this visible to the macro implementation, it will need to be compiled in an earlier stage,
  //    e.g. a separate SBT sub-project.
}
jvican/scala
test/files/pos/t8947/Macro_1.scala
Scala
bsd-3-clause
1,866
/** * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com> */ package actorbintree import akka.actor._ import scala.collection.immutable.Queue import actorbintree.BinaryTreeNode.CopyFinished object BinaryTreeSet { trait Operation { def requester: ActorRef def id: Int def elem: Int } trait OperationReply { def id: Int } /** Request with identifier `id` to insert an element `elem` into the tree. * The actor at reference `requester` should be notified when this operation * is completed. */ case class Insert(requester: ActorRef, id: Int, elem: Int) extends Operation /** Request with identifier `id` to check whether an element `elem` is present * in the tree. The actor at reference `requester` should be notified when * this operation is completed. */ case class Contains(requester: ActorRef, id: Int, elem: Int) extends Operation /** Request with identifier `id` to remove the element `elem` from the tree. * The actor at reference `requester` should be notified when this operation * is completed. */ case class Remove(requester: ActorRef, id: Int, elem: Int) extends Operation /** Request to perform garbage collection*/ case object GC /** Holds the answer to the Contains request with identifier `id`. * `result` is true if and only if the element is present in the tree. */ case class ContainsResult(id: Int, result: Boolean) extends OperationReply /** Message to signal successful completion of an insert or remove operation. */ case class OperationFinished(id: Int) extends OperationReply } class BinaryTreeSet extends Actor { import BinaryTreeSet._ import BinaryTreeNode._ def createRoot: ActorRef = context.actorOf(BinaryTreeNode.props(0, initiallyRemoved = true)) var root = createRoot var queue = Queue.empty[Operation] def receive = normal /** Accepts `Operation` and `GC` messages. */ val normal: Receive = { case op : Operation => run(op) case GC => startGC } val running: Receive = enQueue orElse { case or: OperationReply => runNext case GC => context become scheduleGC } val scheduleGC: Receive = enQueue orElse { case or: OperationReply => startGC } def garbageCollecting(newRoot: ActorRef): Receive = enQueue orElse { case CopyFinished => { root = newRoot runNext } } def enQueue: Receive = { case op : Operation => queue = queue.enqueue(op) } def runNext { if(queue.isEmpty) context.become(normal) else { val op = queue.head queue = queue.tail run(op) } } def run(op: Operation) { context become running root ! op } def startGC { val newRoot = createRoot context.become(garbageCollecting(newRoot)) root ! CopyTo(newRoot) } } object BinaryTreeNode { trait Position case object Left extends Position case object Right extends Position case class CopyTo(treeNode: ActorRef) case object CopyFinished def props(elem: Int, initiallyRemoved: Boolean = false) = Props(classOf[BinaryTreeNode], elem, initiallyRemoved) } class BinaryTreeNode(val elem: Int, initiallyRemoved: Boolean) extends Actor { import BinaryTreeNode._ import BinaryTreeSet._ var subtrees = Map[Position, ActorRef]() var removed = initiallyRemoved def receive = normal /** Handles `Operation` messages and `CopyTo` requests. */ val normal: Receive = { case op: Operation => { if(op.elem < elem && subtrees.contains(Left)) subtrees(Left) forward op else if (op.elem > elem && subtrees.contains(Right)) subtrees(Right) forward op else perform(op) } case CopyTo(treeNode) => { checkFinish(subtrees.values.toSet, removed) if(!removed) treeNode ! Insert(self, elem, elem ) //use elem as insert operation id subtrees.values.foreach(_ ! 
CopyTo(treeNode)) } case of: OperationReply => { context become normal context.parent forward of } } def insert(position: Position, newElem: Int, id: Int) { subtrees += position -> context.actorOf(BinaryTreeNode.props(newElem)) } def perform(op: Operation) { val reply = op match { case Insert(_, id, newElem) => { if (newElem < elem) insert(Left, newElem, id) else if (newElem > elem) insert(Right, newElem, id) else removed = false OperationFinished(id) } case Contains(_, id, testElem) => ContainsResult(id, !removed && testElem == elem) case Remove(_, id, toRemove) => { if (toRemove == elem) removed = true OperationFinished(id) } } op.requester ! reply self ! reply } // optional /** `expected` is the set of ActorRefs whose replies we are waiting for, * `insertConfirmed` tracks whether the copy of this node to the new tree has been confirmed. */ def copying(expected: Set[ActorRef], insertConfirmed: Boolean): Receive = { case CopyFinished => { val rest = expected - sender checkFinish(rest, insertConfirmed) } case of: OperationFinished => checkFinish(expected, true) } def checkFinish(expected: Set[ActorRef], insertConfirmed: Boolean) = { if(expected.isEmpty && insertConfirmed) { context.parent ! CopyFinished context.stop(self) }else { context become copying(expected, insertConfirmed) } } }
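// Hedged usage sketch (added for illustration; not part of the original file).
// `DemoPrinter` is a made-up requester that just prints every reply it gets.
class DemoPrinter extends Actor {
  def receive = { case reply => println(reply) }
}

object BinaryTreeSetDemo extends App {
  import BinaryTreeSet._

  val system = ActorSystem("demo")
  val printer = system.actorOf(Props[DemoPrinter], "printer")
  val set = system.actorOf(Props[BinaryTreeSet], "set")

  set ! Insert(printer, id = 1, elem = 42)   // printer receives OperationFinished(1)
  set ! Contains(printer, id = 2, elem = 42) // printer receives ContainsResult(2, true)
  set ! GC                                   // copies live elements to a fresh root
  set ! Remove(printer, id = 3, elem = 42)   // queued during GC, replayed afterwards
}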
kailuowang/PrinciplesOfReactiveProgramming
actorbintree/src/main/scala/actorbintree/BinaryTreeSet.scala
Scala
mit
5,444
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.spark.sql import java.util.{Map => JMap} import org.apache.spark.sql.types.ArrayType import org.apache.spark.sql.types.DataTypes._ import org.apache.spark.sql.types.StructType import org.codehaus.jackson.map.ObjectMapper import org.elasticsearch.hadoop.cfg.ConfigurationOptions._ import org.elasticsearch.hadoop.cfg.Settings import org.elasticsearch.hadoop.util.TestSettings import org.elasticsearch.spark.sql.SchemaUtils._ import org.junit.Assert.assertEquals import org.junit.Assert.assertFalse import org.junit.Assert.assertTrue import org.junit.Before import org.junit.Test import java.util.Collections import org.elasticsearch.hadoop.serialization.dto.mapping.FieldParser class SchemaUtilsTest { var cfg: Settings = null @Before def start(): Unit = { cfg = new TestSettings } @Test def testConvertToStructSimpleField(): Unit = { val mapping = """{ | "simple" : { | "properties" : { | "name" : { | "type" : "string" | } | } | } |} |""".stripMargin val struct = getStruct(mapping) assertTrue(struct.fieldNames.contains("name")) assertEquals(StringType, struct("name").dataType) } @Test def testConvertToStructWithObject(): Unit = { val mapping = """{ "nested-array": { | "properties" : { | "arr" : { | "properties" : { | "one" : { "type" : "string" }, | "two" : { "type" : "string" } | } | }, | "top-level" : { "type" : "string" } | } |} }""".stripMargin val struct = getStruct(mapping) assertTrue(struct.fieldNames.contains("arr")) assertFalse(struct.fieldNames.contains("one")) val nested = struct("arr").dataType assertEquals("struct", nested.typeName) val arr = nested.asInstanceOf[StructType] assertTrue(arr.fieldNames.contains("one")) assertTrue(arr.fieldNames.contains("two")) assertEquals(StringType, arr("one").dataType) assertEquals(StringType, arr("two").dataType) } @Test def testConvertToStructWithSpecifiedArray(): Unit = { val mapping = """{ | "simple" : { | "properties" : { | "name" : { | "type" : "string" | } | } | } |} |""".stripMargin cfg.setProperty(ES_READ_FIELD_AS_ARRAY_INCLUDE, "name") val struct = getStruct(mapping) assertTrue(struct.fieldNames.contains("name")) assertEquals("array", struct("name").dataType.typeName) val arr = struct("name").dataType.asInstanceOf[ArrayType] assertEquals(StringType, arr.elementType) } @Test def testConvertToStructWithSpecifiedArrayDepth(): Unit = { val mapping = """{ | "simple" : { | "properties" : { | "name" : { | "type" : "string" | } | } | } |} |""".stripMargin cfg.setProperty(ES_READ_FIELD_AS_ARRAY_INCLUDE, "name:3") val struct = getStruct(mapping) assertTrue(struct.fieldNames.contains("name")) // first level assertEquals("array", struct("name").dataType.typeName) var arr = struct("name").dataType.asInstanceOf[ArrayType] // second level assertEquals("array", 
arr.elementType.typeName) arr = arr.elementType.asInstanceOf[ArrayType] // third type assertEquals("array", arr.elementType.typeName) arr = arr.elementType.asInstanceOf[ArrayType] // actual type assertEquals(StringType, arr.elementType) } @Test def testConvertToStructWithJoinField(): Unit = { val mapping = """{ | "join": { | "properties": { | "my_join": { | "type": "join", | "relations": { | "my_parent": "my_child" | } | } | } | } |} """.stripMargin val struct = getStruct(mapping) assertTrue(struct.fieldNames.contains("my_join")) val nested = struct("my_join").dataType assertEquals("struct", nested.typeName) val arr = nested.asInstanceOf[StructType] assertTrue(arr.fieldNames.contains("name")) assertTrue(arr.fieldNames.contains("parent")) assertEquals(StringType, arr("name").dataType) assertEquals(StringType, arr("parent").dataType) } @Test def testDetectRowInfoSimple(): Unit = { val mapping = """{ "array-mapping-top-level": { | "properties" : { | "arr" : { | "properties" : { | "one" : { "type" : "string" }, | "two" : { "type" : "string" } | } | }, | "top-level" : { "type" : "string" } | } |} }""".stripMargin val struct = getStruct(mapping) val info = detectRowInfo(cfg, struct) assertEquals("arr,top-level", info._1.getProperty("_")) assertEquals("one,two", info._1.getProperty("arr")) } @Test def testDetectRowInfoWithOneNestedArray(): Unit = { val mapping = """{ "array-mapping-top-level": { | "properties" : { | "arr" : { | "properties" : { | "one" : { "type" : "string" }, | "two" : { "type" : "string" } | } | }, | "top-level" : { "type" : "string" } | } |} }""".stripMargin cfg.setProperty(ES_READ_FIELD_AS_ARRAY_INCLUDE, "arr") val struct = getStruct(mapping) val info = detectRowInfo(cfg, struct) assertEquals("arr,top-level", info._1.getProperty("_")) assertEquals("one,two", info._1.getProperty("arr")) assertEquals("1", info._2.getProperty("arr")) } @Test def testDetectRowInfoWithMultiDepthArray(): Unit = { val mapping = """{ "array-mapping-top-level": { | "properties" : { | "arr" : { | "properties" : { | "one" : { "type" : "string" }, | "two" : { "type" : "string" } | } | }, | "top-level" : { "type" : "string" } | } |} }""".stripMargin cfg.setProperty(ES_READ_FIELD_AS_ARRAY_INCLUDE, "arr:3") val struct = getStruct(mapping) val info = detectRowInfo(cfg, struct) assertEquals("arr,top-level", info._1.getProperty("_")) assertEquals("one,two", info._1.getProperty("arr")) assertEquals("3", info._2.getProperty("arr")) } private def wrapMappingAsResponse(mapping: String): String = s"""{ | "index": { | "mappings": $mapping | } |} """.stripMargin private def fieldFromMapping(mapping: String) = { FieldParser.parseMapping(new ObjectMapper().readValue(wrapMappingAsResponse(mapping), classOf[JMap[String, Object]])).getResolvedView } private def getStruct(mapping: String) = { convertToStruct(fieldFromMapping(mapping), Collections.emptyMap(), cfg) } }
takezoe/elasticsearch-hadoop
spark/sql-20/src/test/scala/org/elasticsearch/spark/sql/SchemaUtilsTest.scala
Scala
apache-2.0
7,616
package coder.simon.slots.common

import java.util.Random

object Spin {
  type RNG = Int => Int

  def randomRNG(random: Random)(max: Int) = random.nextInt(max)

  def idxMatrix(reelInfos: Seq[ReelInfo]): Matrix =
    randomMatrix(randomRNG(new java.util.Random()))(reelInfos)

  def randomMatrix(random: RNG)(reelInfos: Seq[ReelInfo]): Matrix = {
    def gen(max: Int, times: Int) = {
      val start = random(max)
      (0 until times).map(i => (start + i) % max).toIndexedSeq
    }
    reelInfos.map(ri => gen(ri.numOfSymbols, ri.lenOfReel)).toIndexedSeq
  }

  def crop(reels: ReelArray, matrix: Matrix): ReelArray = {
    def oneReel(column: Seq[Int], reel: Array[Symbol]) = column.map(i => reel(i)).toArray
    reels.zipWithIndex.map { case (reel, x) => oneReel(matrix(x), reel) }
  }

  def reelsInfo(lenOfReel: Int, reels: ReelArray) =
    reels.map(r => ReelInfo(r.length, lenOfReel)).toSeq

  def randomSpin(lenOfReel: Int, reels: ReelArray) = {
    val matrix = idxMatrix(reelsInfo(lenOfReel, reels))
    crop(reels, matrix)
  }
}
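// Hedged usage sketch (added for illustration; not part of the original file).
// ReelInfo(numOfSymbols, lenOfReel) is defined elsewhere in this package; a
// fixed RNG makes the modulo wrap-around of `gen` visible.
object SpinExample extends App {
  val matrix = Spin.randomMatrix(_ => 4)(Seq(ReelInfo(5, 3)))
  println(matrix) // Vector(Vector(4, 0, 1)): start at 4, then wrap to 0 and 1
}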
erlangxk/fpscala
src/main/scala/coder/simon/slots/common/Spin.scala
Scala
mit
1,036
package com.arcusys.valamis.updaters.version330.schema3301 import com.arcusys.valamis.persistence.common.DbNameUtils._ import com.arcusys.valamis.persistence.common.{LongKeyTableComponent, SlickProfile, TypeMapper} trait TableComponent extends LongKeyTableComponent with TypeMapper { self: SlickProfile => import driver.simple._ implicit val authTypeMapper = enumerationMapper(AuthType) class LrsEndpointTable(tag: Tag) extends Table[LrsEndpoint](tag, "LEARN_LRS_ENDPOINT") { def id = column[Long](idName, O.PrimaryKey, O.AutoInc) def endpoint = column[String]("END_POINT", O.Length(2000, varying = true)) def authType = column[AuthType.AuthType]("AUTH_TYPE", O.Length(255, varying = true)) def key = column[String]("KEY", O.Length(2000, varying = true)) def secret = column[String]("SECRET", O.Length(2000, varying = true)) def customHost = column[Option[String]]("CUSTOM_HOST") def companyId = column[Long]("COMPANY_ID") def companyIdOpt = column[Option[Long]]("COMPANY_ID") def * = (endpoint, authType, key, secret, customHost, id.?) <> (LrsEndpoint.tupled, LrsEndpoint.unapply) } class SettingsTable(tag : Tag) extends Table[Setting](tag, "LEARN_SETTINGS") { def id = column[Long](idName, O.PrimaryKey, O.AutoInc) def dataKey = column[String]("DATAKEY", O.Length(128, varying = true)) def dataValue = column[String]("DATAVALUE", O.Length(2048, varying = true)) def companyId = column[Option[Long]]("COMPANY_ID") def * = (id, dataKey, dataValue, companyId) <> (Setting.tupled, Setting.unapply) } class OldSettingsTable(tag : Tag) extends Table[OldSetting](tag, "LEARN_SETTINGS") { def dataKey = column[String]("DATAKEY", O.PrimaryKey, O.Length(128, true)) def dataValue = column[String]("DATAVALUE", O.Length(2048, true)) def * = (dataKey, dataValue) <> (OldSetting.tupled, OldSetting.unapply) } val oldSettings = TableQuery[OldSettingsTable] val settings = TableQuery[SettingsTable] val lrsEndpoint = TableQuery[LrsEndpointTable] }
arcusys/Valamis
valamis-updaters/src/main/scala/com/arcusys/valamis/updaters/version330/schema3301/TableComponent.scala
Scala
gpl-3.0
2,043
package notification.services import models.GITContent import models.Link.Internal import notification.NotificationsFixtures import org.specs2.concurrent.ExecutionEnv import org.specs2.mock.Mockito import org.specs2.mutable.Specification import org.specs2.specification.Scope import scala.concurrent.Future class ArticlePurgeSpec(implicit ee: ExecutionEnv) extends Specification with Mockito with NotificationsFixtures { "ArticlePurge" should { "trigger a soft purge for a breaking news" in new ArticlePurgeScope { val breakingNews = breakingNewsNotification(Nil).copy(link = new Internal("expected/article/id", None, GITContent, None)) articlePurge.purgeFromNotification(breakingNews) should beEqualTo(true).await val urlCaptor = capture[String] there was one(fastlyPurge).softPurge(urlCaptor) println(urlCaptor.value) urlCaptor.value shouldEqual "expected/article/id" } "do nothing for any other notification type" in new ArticlePurgeScope { val newsstand = newsstandShardNotification() articlePurge.purgeFromNotification(newsstand) should beEqualTo(false).await there was no(fastlyPurge).softPurge(any[String]) } } trait ArticlePurgeScope extends Scope { val fastlyPurge: FastlyPurge = { val m = mock[FastlyPurge] m.softPurge(any[String]) returns Future.successful(true) m } val articlePurge = new ArticlePurge(fastlyPurge) } }
guardian/mobile-n10n
notification/test/notification/services/ArticlePurgeSpec.scala
Scala
apache-2.0
1,450
package visitor abstract class HtmlTag extends Element { def getTagName: String def setStartTag(tag: String): Unit def getStartTag: String def setEndTag(tag: String): Unit def getEndTag: String def setTagBody(tagBody: String): Unit = ??? def addChildTag(htmlTag: HtmlTag): Unit = ??? def removeChildTag(htmlTag: HtmlTag): Unit = ??? def getChildren: List[HtmlTag] = ??? def generateHtml(): Unit }
BBK-PiJ-2015-67/sdp-portfolio
exercises/week11/src/main/scala/visitor/HtmlTag.scala
Scala
unlicense
418
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package services.application import connectors.OnlineTestEmailClient import model.EvaluationResults.{ Green, Red } import model._ import model.exchange.FsbScoresAndFeedback import model.persisted.fsb.ScoresAndFeedback import model.persisted.{ ContactDetails, FsbTestGroup, SchemeEvaluationResult } import org.mockito.ArgumentMatchers.{ eq => eqTo, _ } import org.mockito.Mockito._ import repositories.SchemeRepository import repositories.application.GeneralApplicationMongoRepository import repositories.contactdetails.ContactDetailsRepository import repositories.fsb.FsbRepository import services.scheme.SchemePreferencesService import testkit.MockitoImplicits._ import testkit.{ ExtendedTimeout, UnitSpec } import uk.gov.hmrc.http.HeaderCarrier import scala.concurrent.Await import scala.concurrent.duration._ class FsbServiceSpec extends UnitSpec with ExtendedTimeout { "find scores and feedback" must { "handle no data" in new TestFixture { when(mockFsbRepo.findScoresAndFeedback(any[String])).thenReturnAsync(None) val result = service.findScoresAndFeedback(appId).futureValue result mustBe None } "handle data" in new TestFixture { when(mockFsbRepo.findScoresAndFeedback(any[String])).thenReturnAsync(Some(ScoresAndFeedback(1.12, "feedback"))) val result = service.findScoresAndFeedback(appId).futureValue result mustBe Some(FsbScoresAndFeedback(1.12, "feedback")) } } "fsb evaluation" must { "evaluate scheme to Eligible for Job Offer if results are Green" in new TestFixture { val res = FsbTestGroup(List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString))) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(res)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(res.evaluation.result) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_PASSED)).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER)).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER) } "evaluate scheme to Final FAILED if results are red and no more schemes selected" in new TestFixture { val curSchemeStatus = List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString)) val res = FsbTestGroup(List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString))) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(res)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), 
ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), res.evaluation.result)).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED) } "fail to evaluate scheme GES_DS if FCO results were not submitted" in new TestFixture { val curSchemeStatus = List( SchemeEvaluationResult(SchemeId("DigitalDataTechnologyAndCyber"), Red.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString) ) val res = FsbTestGroup(List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString))) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(res)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), res.evaluation.result)).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() intercept[IllegalArgumentException] { Await.result(service.evaluateFsbCandidate(uid)(hc), 1.second) } } "evaluate DS as failed, and then GES_DS as failed too, but do not evaluate GES as EAC evaluation hasn't happened yet" in new TestFixture { val curSchemeStatus = List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) ) val res = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString) )) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(res)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) // DS when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) ) )).thenReturnAsync() // GES_DS when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) ) )).thenReturnAsync() // more fsb required when(mockApplicationRepo.find(uid.toString())).thenReturnAsync(Some(cand1)) when(mockContactDetailsRepo.find(cand1.userId)).thenReturnAsync(cd1) when(mockEmailClient.notifyCandidateOnFinalFailure(eqTo(cd1.email), 
eqTo(cand1.name))(any())).thenReturnAsync() Await.result(service.evaluateFsbCandidate(uid)(hc), 2.seconds) verify(mockApplicationRepo, times(2)).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED) } "evaluate scheme GES_DS as failed, and then GES as failed, but finally DS passed" in new TestFixture { val curSchemeStatus = List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString), SchemeEvaluationResult(SchemeId("DigitalDataTechnologyAndCyber"), Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString) ) val res = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Red.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString) )) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(res)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) // GES_DS when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Red.toString), SchemeEvaluationResult(SchemeId("DigitalDataTechnologyAndCyber"), Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString) ) )).thenReturnAsync() // GES when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(uid.toString(), List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Red.toString), SchemeEvaluationResult(SchemeId("DigitalDataTechnologyAndCyber"), Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Red.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString) ) )).thenReturnAsync() // DS when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_PASSED)).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER)).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER) } /** * DiplomaticAndDevelopmentEconomics (code: GES-DS, fsbType: EAC_DS) TODO: clarify with Paul to rename fsbType to EAC_FCO * - GovernmentEconomicsService (code: GES, fsbType: EAC) * - DiplomaticAndDevelopment (code: DS, fsbType: FCO) * * At FSB the separate parts are named correctly: * EAC pass/fail FCO pass/fail previous actual outcome expected outcome (now fixed) * pass fail offered a job fail * */ "Pass the candidate who is only in the running for GES-DS if the candidate passes " + "both the EAC and FCO parts of the fsb" in new TestFixture { val fsbResult = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) )) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(fsbResult)) override val schemes = 
List(DSSchemeIds.DiplomaticAndDevelopmentEconomics) override val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) // This is the css after FSAC and before FSB evaluation val curSchemeStatus = List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_PASSED)).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER)).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ELIGIBLE_FOR_JOB_OFFER) } "Fail the candidate who is only in the running for GES-DS if the candidate passes " + "the EAC part but fails the DS (FCO) part of the GES_DS fsb" in new TestFixture { val fsbResult = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) )) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(fsbResult)) override val schemes = List(DSSchemeIds.DiplomaticAndDevelopmentEconomics) override val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) // This is the css after FSAC and before FSB evaluation val curSchemeStatus = List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(any[String], any[List[SchemeEvaluationResult]])).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() // Mocking required to send the failure email when(mockApplicationRepo.find(uid.toString())).thenReturnAsync(Some(cand1)) when(mockContactDetailsRepo.find(cand1.userId)).thenReturnAsync(cd1) when(mockEmailClient.notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any())).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED) // verify the failure email is sent out verify(mockEmailClient).notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any[HeaderCarrier]) } "Fail the candidate who is only in the running for GES-DS if the candidate fails the EAC part but passes " + "the DS (FCO) part of the GES_DS fsb. 
Note the candidate should not be invited to the DS part " + "if they fail the EAC part (so this should never happen unless they also have DS as a separate scheme)" in new TestFixture { val fsbResult = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Red.toString) )) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(fsbResult)) override val schemes = List(DSSchemeIds.DiplomaticAndDevelopmentEconomics) override val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) // This is the css after FSAC and before FSB evaluation val curSchemeStatus = List(SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString)) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(any[String], any[List[SchemeEvaluationResult]])).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() // Mocking required to send the failure email when(mockApplicationRepo.find(uid.toString())).thenReturnAsync(Some(cand1)) when(mockContactDetailsRepo.find(cand1.userId)).thenReturnAsync(cd1) when(mockEmailClient.notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any())).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED) // verify the failure email is sent out verify(mockEmailClient).notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any[HeaderCarrier]) } "Fail the candidate who is in the running for GES-DS and DS schemes who passes the EAC part but fails the FCO part of " + "the GES_DS fsb" in new TestFixture { val fsbResult = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Red.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) )) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(fsbResult)) override val schemes = List(DSSchemeIds.DiplomaticAndDevelopmentEconomics, DSSchemeIds.DiplomaticAndDevelopment) override val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) // This is the css after FSAC and before FSB evaluation val curSchemeStatus = List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString), SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString) ) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(any[String], any[List[SchemeEvaluationResult]])).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() // Mocking required to send the failure email 
when(mockApplicationRepo.find(uid.toString())).thenReturnAsync(Some(cand1)) when(mockContactDetailsRepo.find(cand1.userId)).thenReturnAsync(cd1) when(mockEmailClient.notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any())).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED) // verify the failure email is sent out verify(mockEmailClient).notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any[HeaderCarrier]) } "Fail the candidate who is in the running for GES-DS and GES schemes who fails the EAC part and passes the FCO part of " + "the GES_DS fsb" in new TestFixture { val fsbResult = FsbTestGroup(List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopment, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Red.toString) )) when(mockFsbRepo.findByApplicationId(uid.toString())).thenReturnAsync(Some(fsbResult)) override val schemes = List(DSSchemeIds.DiplomaticAndDevelopmentEconomics, DSSchemeIds.GovernmentEconomicsService) override val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) when(mockSchemePreferencesService.find(uid.toString())).thenReturnAsync(selectedSchemes) // This is the css after FSAC and before FSB evaluation val curSchemeStatus = List( SchemeEvaluationResult(DSSchemeIds.DiplomaticAndDevelopmentEconomics, Green.toString), SchemeEvaluationResult(DSSchemeIds.GovernmentEconomicsService, Green.toString) ) when(mockApplicationRepo.getCurrentSchemeStatus(uid.toString())).thenReturnAsync(curSchemeStatus) when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.FSB_FAILED)).thenReturnAsync() when(mockFsbRepo.updateCurrentSchemeStatus(any[String], any[List[SchemeEvaluationResult]])).thenReturnAsync() when(mockApplicationRepo.addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED)).thenReturnAsync() // Mocking required to send the failure email when(mockApplicationRepo.find(uid.toString())).thenReturnAsync(Some(cand1)) when(mockContactDetailsRepo.find(cand1.userId)).thenReturnAsync(cd1) when(mockEmailClient.notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any())).thenReturnAsync() service.evaluateFsbCandidate(uid)(hc).futureValue verify(mockApplicationRepo).addProgressStatusAndUpdateAppStatus(uid.toString(), ProgressStatuses.ALL_FSBS_AND_FSACS_FAILED) // verify the failure email is sent out verify(mockEmailClient).notifyCandidateOnFinalFailure(eqTo(cd1.email), eqTo(cand1.name))(any[HeaderCarrier]) } } trait TestFixture { val hc = HeaderCarrier() val uid = UniqueIdentifier.randomUniqueIdentifier val appId = "appId" val mockApplicationRepo = mock[GeneralApplicationMongoRepository] val mockContactDetailsRepo = mock[ContactDetailsRepository] val mockFsbRepo = mock[FsbRepository] val mockSchemeRepo = mock[SchemeRepository] val mockSchemePreferencesService = mock[SchemePreferencesService] val mockEmailClient = mock[OnlineTestEmailClient] //TODO:changed type was EmailClient val cand1 = Candidate("123", None, None, Some("[email protected]"), Some("Leia"), Some("Amadala"), None, None, None, None, None, None, None) val cd1 = ContactDetails(outsideUk = false, Address("line1a"), Some("123"), Some("UK"), "[email protected]", "12345") val service = new FsbService( mockApplicationRepo, mockContactDetailsRepo, mockFsbRepo, mockSchemeRepo, mockSchemePreferencesService, 
mockEmailClient ) val schemes = List( SchemeId("DigitalDataTechnologyAndCyber"), SchemeId("DiplomaticAndDevelopment"), SchemeId("DiplomaticAndDevelopmentEconomics"), SchemeId("GovernmentEconomicsService") ) val selectedSchemes = SelectedSchemes(schemes, orderAgreed = true, eligible = true) } }
hmrc/fset-faststream
test/services/application/FsbServiceSpec.scala
Scala
apache-2.0
22,878
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0, (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tle.web.workflow.notification import com.tle.core.guice.Bind import com.tle.core.notification.beans.Notification import javax.inject.Singleton @Bind @Singleton class StandardNotifications extends FilterableNotification with TemplatedNotification with NotificationLookup { type N = ItemNotification def toFreemarkerModel(notes: Iterable[Notification]) = createItemNotifications("notification-item.ftl", notes) }
equella/Equella
Source/Plugins/Core/com.equella.core/scalasrc/com/tle/web/workflow/notification/StandardNotifications.scala
Scala
apache-2.0
1,252
package ru.wordmetrix.arrangetext

import java.io.File

import ru.wordmetrix.features.Features
import ru.wordmetrix.features.Features.String2Word
import ru.wordmetrix.smartfile.SmartFile.fromFile
import ru.wordmetrix.utils.{CFG, debug, _}
import ru.wordmetrix.vector.Vector

import scala.Option.option2Iterable
import scala.annotation.tailrec
import scala.util.Try
import scala.util.matching.Regex
import ru.wordmetrix.treeapproximator._

/**
 * ArrangeText is a strategy that places a bunch of text in convenient fashion.
 *
 * It provides three predefined approaches of saving text: a tree of text
 * arranged by similarity, a set of clusters placed into folders and a web-
 * page of links on web pages (assuming that an original URI is available).
 */
object ArrangeText extends App {
  override def main(args: Array[String]): Unit = {
    val (command, args1) = args match {
      case Array(command, args @ _*) if Set("tree", "cluster", "links")(command) =>
        (Some(command), args.toList)

      case args @ Array(_, _*) =>
        (Some("all"), args.toList)

      case args =>
        println("\nEnter: (tree | cluster | links) [Options] [<FILE> [..]]\n")
        (None, args.toList)
    }

    implicit val cfg = CFG(args1)

    lazy val arrangetext = ArrangeText()

    command foreach {
      case "tree" =>
        new ArrangeTextDumpTree(arrangetext).dump() // arrange_tree(tree_aligned, target)

      case "cluster" =>
        new ArrangeTextDumpClusters(arrangetext).dump()

      case "links" =>
        new ArrangeTextDumpHTML(arrangetext, cfg.f2u match {
          case CFG.F2U.Simple => new File2URITransform()
          case CFG.F2U.Map    => new File2URIMap()
          case CFG.F2U.Dump   => new File2URIDump()
        }).dump()

      case "all" =>
        println("tree.size = " + arrangetext.tree.size)
        new ArrangeTextDumpTree(arrangetext).dump() // arrange_tree(tree_aligned, target)
        println("cluster size = " + arrangetext.clusters.size)
        new ArrangeTextDumpClusters(arrangetext).dump()
        println("links = " + arrangetext.clusters.size)

      case _ =>
        println("Huh, boyz ...")
    }
  }

  def apply()(implicit cfg: CFG) = new ArrangeText()
}

abstract class ArrangeTextDump(arrangetree: ArrangeText)(implicit cfg: CFG) {
  implicit def accuracy = cfg.accuracy

  def vector2Title(v: Vector[String], n: Int = 5, stopword: Set[String] = Set(" ")) = {
    v.toList.sortBy(-_._2).takeWhile(_._2 > 0d).map(_._1)
      .filterNot(stopword).filterNot(Set(" ", "")).take(n).mkString(" ")
  }

  implicit def vectors2Vectors(v: Vector[Word]): Vector[String] =
    Vector(v.map {
      case (x, y) =>
        (arrangetree.index.rmap.getOrElse(x, "unknown" /* word is unknown, or the index possibly is old */) -> y)
    } toList)
}

class ArrangeText()(implicit cfg: CFG) {
  implicit def accuracy = cfg.accuracy

  type Word = Int
  type Node = TreeApproximator.Node[Word, File]
  type Tree = TreeApproximator.Tree[Word, File]
  type Leaf = TreeApproximator.Leaf[Word, File]

  val start_time = System.currentTimeMillis()

  def tree_raw: Tree = vectors.zipWithIndex.foldLeft(TreeApproximator[Word, File]())({
    case (tree, ((vector, file), n)) =>
      log.time("%s %d %s tree(%s).energy => %4.3f, length = %d / %d".format(
        (System.currentTimeMillis() - start_time),
        tree.n, index.map.size, file,
        tree.energy2,
        vector.size,
        tree.average.size)
      ) {
        if (n % 100 == 0) System.gc()
        (tree + (vector, file)).rectify(cfg.rectify_inline)
      }
  })

  def tree_opt: Tree = (1 to cfg.rectify).foldLeft(tree_raw)({
    case (tree, n) =>
      log.time("Rectifying #%3d = %4.3f %d, size = %s, length = %s".format(
        n, tree.energy2, tree.average.size, tree.toList.length,
        tree.align()._1.pathlength)
      ) {
        tree.rectify(tree.n)
      }
  })

  def tree_aligned = (cfg.path / "tree.dat") cache {
    val tree = tree_opt.align()._1
    // debug("tree_aligned size = %s", tree_opt.toList.length)
    val size = tree.size
    val pathlength = tree.pathlength
    log("Average distance = %f (%f/%d)", pathlength / size, pathlength, size)
    tree
  }

  lazy val tree: Tree = tree_aligned

  lazy val index = cfg.path / "index.dat" cache index_uncached

  lazy val (vectors, index_uncached) = sample(
    for {
      file <- scala.util.Random.shuffle(cfg.files).toStream
      page <- Try(file.readLines().mkString(" ")).toOption
    } yield (
      if (cfg.ishtml) new Html2Ascii(page).wrap(72) else page,
      file
    ),
    Stream(),
    String2Word()
  ) match {
    case (vectors, index) =>
      val (empties, substantive) = vectors.partition({
        case (vs, filename) => vs.isEmpty
      })

      if (empties.nonEmpty) log("%s files were rejected as empty", empties.size)

      if (cfg.isdebug) empties.foreach {
        case (v, file) => debug("Splitter reduces %s to ashes", file)
      }

      val uniq = substantive.toMap

      debug("%s unique vectors", uniq.size)

      if (uniq.size != substantive.size) {
        log("%s duplicates were rejected", substantive.size - uniq.size)
        if (cfg.isdebug) substantive.filter({
          case (v, f) => uniq.get(v) match {
            case Some(`f`) => false
            case _         => true
          }
        }) foreach {
          case (v, f) => debug("%s is the same as %s", f, uniq(v))
        }
      }

      cfg.path / "vocabulary_whole.txt" write index.map.keys.toList.sorted

      (uniq.toStream, index)
  }

  lazy val clusters: Iterable[Iterable[Vector[Word]]] = debug.time("clustering") {
    debug("Size tree: %s", tree.toList.size)
    val c = Clusters(tree_aligned)
    debug("clusters %s %s %s", c.size, c.iterator.toList.flatten.size,
      c.heads.toList.flatMap { _._2 }.size)
    c
  }

  val delimiter: Regex = """\W+""".r

  @tailrec
  private def sample(
    files: Stream[(String, File)],
    vectors: Stream[(Vector[Word], File)],
    index: String2Word[String, Double]): (Stream[(Vector[Word], File)], String2Word[String, Double]) =
    files match {
      case (s, file) #:: files =>
        val (countids, index1) = Features.fromText(s, index)
        sample(
          files,
          (Vector(countids.toList), file) #:: vectors,
          index1
        )
      case Stream() => (vectors, index)
    }
}
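// Hypothetical driver (not part of the original file): the App above is meant
// to be run from the command line, e.g. `run tree <options> <files...>`.
// Programmatically the same pipeline looks roughly like this; CFG's exact
// option handling is assumed from its use in main() above, and the corpus
// paths are illustrative.
object ArrangeTextSketch {
  def main(args: Array[String]): Unit = {
    implicit val cfg: CFG = CFG(List("corpus/a.txt", "corpus/b.txt"))
    val arranged = ArrangeText()                  // lazily reads, vectorises and dedupes the files
    println("tree.size = " + arranged.tree.size)  // forces the similarity tree to be built
    new ArrangeTextDumpTree(arranged).dump()      // writes the aligned tree to cfg.path
  }
}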
electricmind/treeapproximator
arrangetext/src/main/scala/ru/wordmetrix/arrangetext/ArrangeText.scala
Scala
apache-2.0
6,428
package fpinscala.gettingstarted import scala.annotation.tailrec // A comment! /* Another comment */ /** A documentation comment */ object MyModule { def abs(n: Int): Int = if (n < 0) -n else n private def formatAbs(x: Int) = { val msg = "The absolute value of %d is %d" msg.format(x, abs(x)) } def main(args: Array[String]): Unit = println(formatAbs(-42)) // A definition of factorial, using a local, tail recursive function def factorial(n: Int): Int = { @annotation.tailrec def go(n: Int, acc: Int): Int = if (n <= 0) acc else go(n-1, n*acc) go(n, 1) } // Another implementation of `factorial`, this time with a `while` loop def factorial2(n: Int): Int = { var acc = 1 var i = n while (i > 0) { acc *= i; i -= 1 } acc } // Exercise 1: Write a function to compute the nth fibonacci number def fib(n: Int): Int = { @tailrec def loop(n: Int, acc: Int, pAcc: Int): Int = { if (n == 0) pAcc else loop(n - 1, pAcc + acc, acc) } loop(n, 1, 0) } // This definition and `formatAbs` are very similar.. private def formatFactorial(n: Int) = { val msg = "The absolute value of %d is %d." msg.format(n, factorial(n)) } // We can generalize `formatAbs` and `formatFactorial` to // accept a _function_ as a parameter def formatResult(name: String, n: Int, f: Int => Int) = { val msg = "The %s of %d is %d." msg.format(name, n, f(n)) } } object FormatAbsAndFactorial { import MyModule._ // Now we can use our general `formatResult` function // with both `abs` and `factorial` def main(args: Array[String]): Unit = { println(formatResult("absolute value", -42, abs)) println(formatResult("factorial", 7, factorial)) } } // Functions get passed around so often in FP that it's // convenient to have syntax for constructing a function // *without* having to give it a name object AnonymousFunctions { import MyModule._ // Some examples of anonymous functions: def main(args: Array[String]): Unit = { println(formatResult("absolute value", -42, abs)) println(formatResult("factorial", 7, factorial)) println(formatResult("increment", 7, (x: Int) => x + 1)) println(formatResult("increment2", 7, (x) => x + 1)) println(formatResult("increment3", 7, x => x + 1)) println(formatResult("increment4", 7, _ + 1)) println(formatResult("increment5", 7, x => { val r = x + 1; r })) } } object MonomorphicBinarySearch { // First, a binary search implementation, specialized to `Double`, // another primitive type in Scala, representing 64-bit floating // point numbers // Ideally, we could generalize this to work for any `Array` type, // so long as we have some way of comparing elements of the `Array` def binarySearch(ds: Array[Double], key: Double): Int = { @annotation.tailrec def go(low: Int, mid: Int, high: Int): Int = { if (low > high) -mid - 1 else { val mid2 = (low + high) / 2 val d = ds(mid2) // We index into an array using the same // syntax as function application if (d == key) mid2 else if (d > key) go(low, mid2, mid2-1) else go(mid2 + 1, mid2, high) } } go(0, 0, ds.length - 1) } } object PolymorphicFunctions { // Here's a polymorphic version of `binarySearch`, parameterized on // a function for testing whether an `A` is greater than another `A`. 
def binarySearch[A](as: Array[A], key: A, gt: (A,A) => Boolean): Int = { @annotation.tailrec def go(low: Int, mid: Int, high: Int): Int = { if (low > high) -mid - 1 else { val mid2 = (low + high) / 2 val a = as(mid2) val greater = gt(a, key) if (!greater && !gt(key,a)) mid2 else if (greater) go(low, mid2, mid2-1) else go(mid2 + 1, mid2, high) } } go(0, 0, as.length - 1) } // Exercise 2: Implement a polymorphic function to check whether // an `Array[A]` is sorted def isSorted[A](as: Array[A], gt: (A,A) => Boolean): Boolean = { @tailrec def loop(n: Int): Boolean = { if (n > (as.length - 2)) true else if (!gt(as(n), as(n+1))) false else loop(n + 1) } loop(0) } // Polymorphic functions are often so constrained by their type // that they only have one implementation! Here's an example: // Exercise 3: Implement `partial1`. def partial1[A, B, C](a: A, f: (A, B) => C): B => C = { b => f(a, b) } // Exercise 4: Implement `curry`. // Note that `=>` associates to the right, so we could // write the return type as `A => B => C` def curry[A, B, C](f: (A, B) => C): A => (B => C) = { a => b => f(a, b) } // NB: The `Function2` trait has a `curried` method already // Exercise 5: Implement `uncurry` def uncurry[A, B, C](f: A => B => C): (A, B) => C = { (a, b) => f(a)(b) } /* NB: There is a method on the `Function` object in the standard library, `Function.uncurried` that you can use for uncurrying. Note that we can go back and forth between the two forms. We can curry and uncurry and the two forms are in some sense "the same". In FP jargon, we say that they are _isomorphic_ ("iso" = same; "morphe" = shape, form), a term we inherit from category theory. */ // Exercise 6: Implement `compose` def compose[A,B,C](f: B => C, g: A => B): A => C = { (a) => f(g(a)) } }
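// A quick, hypothetical driver for the exercises above (not part of the
// original file); expected outputs are in the trailing comments.
object GettingStartedDemo {
  import PolymorphicFunctions._

  def main(args: Array[String]): Unit = {
    // `gt` here means "in order", so pass a <= b for an ascending check.
    println(isSorted(Array(1, 2, 3), (a: Int, b: Int) => a <= b)) // true
    println(isSorted(Array(3, 1, 2), (a: Int, b: Int) => a <= b)) // false

    val add = (a: Int, b: Int) => a + b
    val curried = curry(add)
    println(curried(1)(2))          // 3
    println(uncurry(curried)(1, 2)) // 3: uncurry undoes curry

    println(compose((b: Int) => b * 2, (a: Int) => a + 1)(3)) // 8, i.e. (3 + 1) * 2
  }
}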
benusher/fpinscala_exercises
exercises/src/main/scala/fpinscala/gettingstarted/GettingStarted.scala
Scala
mit
5,459
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.ct.accounts.frs102.calculations

import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs10x.boxes.{AC13, AC15, AC16, AC17, AC24, AC25}
import uk.gov.hmrc.ct.accounts.{AC12, AC14, AC401, AC402, AC403, AC404}

class GrossProfitAndLossCalculatorSpec extends WordSpec with Matchers {

  "GrossProfitAndLossCalculator" should {
    "calculate AC16 and" when {
      "return empty AC16 if all are empty" in new GrossProfitAndLossCalculator {
        calculateAC16(AC12(None), AC24(None), AC401(None), AC403(None), AC14(None)) shouldBe AC16(None)
      }
      "return AC12 + AC401 as AC16 if AC12 and AC401 have values and AC24, AC403 and AC14 are empty" in new GrossProfitAndLossCalculator {
        calculateAC16(AC12(Some(12)), AC24(None), AC401(Some(10)), AC403(None), AC14(None)) shouldBe AC16(Some(22))
      }
      "return -(AC14 + AC403) as AC16 if AC12, AC24 and AC401 are empty and AC14 and AC403 have values" in new GrossProfitAndLossCalculator {
        calculateAC16(AC12(None), AC24(None), AC401(None), AC403(Some(10)), AC14(Some(14))) shouldBe AC16(Some(-24))
      }
      "return AC12 + AC24 + AC401 - AC403 - AC14 as AC16 if all have values" in new GrossProfitAndLossCalculator {
        calculateAC16(AC12(Some(12)), AC24(Some(2)), AC401(Some(20)), AC403(Some(9)), AC14(Some(14))) shouldBe AC16(Some(11))
      }
    }
    "calculate AC17 and" when {
      "return empty AC17 if all are empty" in new GrossProfitAndLossCalculator {
        calculateAC17(AC13(None), AC25(None), AC402(None), AC404(None), AC15(None)) shouldBe AC17(None)
      }
      "return AC13 + AC402 as AC17 if AC13 and AC402 have values and AC25, AC404 and AC15 are empty" in new GrossProfitAndLossCalculator {
        calculateAC17(AC13(Some(13)), AC25(None), AC402(Some(8)), AC404(None), AC15(None)) shouldBe AC17(Some(21))
      }
      "return -(AC15 + AC404) as AC17 if AC13, AC25 and AC402 are empty and AC15 and AC404 have values" in new GrossProfitAndLossCalculator {
        calculateAC17(AC13(None), AC25(None), AC402(None), AC404(Some(1)), AC15(Some(15))) shouldBe AC17(Some(-16))
      }
      "return AC13 + AC25 + AC402 - AC404 - AC15 as AC17 if all have values" in new GrossProfitAndLossCalculator {
        calculateAC17(AC13(Some(13)), AC25(Some(2)), AC402(Some(7)), AC404(Some(4)), AC15(Some(15))) shouldBe AC17(Some(3))
      }
    }
  }
}
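// A reading of the cases above (inferred from the tests, not taken from the
// calculator's source): empty boxes contribute zero, and a fully-empty input
// yields None, so effectively
//   AC16 = AC12 + AC24 + AC401 - AC403 - AC14   e.g. 12 + 2 + 20 - 9 - 14 = 11
//   AC17 = AC13 + AC25 + AC402 - AC404 - AC15   e.g. 13 + 2 + 7 - 4 - 15 = 3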
hmrc/ct-calculations
src/test/scala/uk/gov/hmrc/ct/accounts/frs102/calculations/GrossProfitAndLossCalculatorSpec.scala
Scala
apache-2.0
2,938
/**
 * Copyright 2013 Brian Porter (poornerd at gmail dot com) - twitter: @poornerd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package securesocial.core.providers

import play.api.libs.json.JsObject
import securesocial.core._
import securesocial.core.providers.XingProvider._
import securesocial.core.services.{ CacheService, RoutesService }

import scala.concurrent.Future

/**
 * A Xing Provider
 */
class XingProvider(
  routesService: RoutesService,
  cacheService: CacheService,
  client: OAuth1Client)
    extends OAuth1Provider(routesService, cacheService, client) {
  override val id = XingProvider.Xing

  override def fillProfile(info: OAuth1Info): Future[BasicProfile] = {
    client.retrieveProfile(XingProvider.VerifyCredentials, info).map { json =>
      val me = (json \ Users).as[Seq[JsObject]].head
      val userId = (me \ Id).as[String]
      val displayName = (me \ Name).asOpt[String]
      val lastName = (me \ LastName).asOpt[String]
      val firstName = (me \ FirstName).asOpt[String]
      val profileImage = (me \ ProfileImage \ Large).asOpt[String]
      val email = (me \ ActiveEmail).asOpt[String]
      BasicProfile(id, userId, displayName, firstName, lastName, email, profileImage, authMethod, Some(info))
    } recover {
      case e =>
        logger.error("[securesocial] error retrieving profile information from Xing", e)
        throw new AuthenticationException()
    }
  }
}

object XingProvider {
  val VerifyCredentials = "https://api.xing.com/v1/users/me"
  val Xing = "xing"
  val Id = "id"
  val Name = "display_name"
  val FirstName = "first_name"
  val LastName = "last_name"
  val Users = "users"
  val ProfileImage = "photo_urls"
  val Large = "large"
  val ActiveEmail = "active_email"
}
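// For orientation, a sketch of the payload fillProfile walks. The shape is
// inferred from the lookup keys above; the field values are invented:
//   {
//     "users": [ {
//       "id": "1", "display_name": "...", "first_name": "...",
//       "last_name": "...", "active_email": "...",
//       "photo_urls": { "large": "https://..." }
//     } ]
//   }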
k4200/securesocial
module-code/app/securesocial/core/providers/XingProvider.scala
Scala
apache-2.0
2,259
package de.leanovate.dose.product.consul

import spray.json.{JsonFormat, DefaultJsonProtocol}

case class ServiceNode(
  Node: String,
  Address: String,
  ServiceID: String,
  ServiceName: String,
  ServiceTags: Seq[String],
  ServicePort: Int
)

object ServiceNode extends DefaultJsonProtocol {
  implicit val serviceNodeFormat: JsonFormat[ServiceNode] = jsonFormat6(ServiceNode.apply)
}
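// Hypothetical round-trip (not part of the original file), showing the derived
// format in use; the field values are invented.
object ServiceNodeExample extends App {
  import spray.json._

  val node = ServiceNode("node1", "10.0.0.1", "product-1", "product", Seq("http"), 8080)
  val js = node.toJson                       // serialise via the jsonFormat6-derived format
  println(js.compactPrint)
  println(js.convertTo[ServiceNode] == node) // true: the format round-trips cleanly
}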
leanovate/microzon-product
src/main/scala/de/leanovate/dose/product/consul/ServiceNode.scala
Scala
mit
391
class A object Impl { def foo()(implicit x: A = null): Int = 2 def test: Int = { foo()() // ok foo() // did not work before, does now } } // same with multiple parameters object Impl2 { def foo()(implicit ev: Int, x: A = null): Int = 2 def test: Int = { implicit val ii: Int = 1 foo() } }
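// Illustrative follow-up (not part of the original test): the same default
// kicks in at a call site outside the object as long as no implicit A is in
// scope; with one in scope, implicit resolution wins over the default.
object Impl3 {
  import Impl.foo
  def noImplicit: Int = foo() // expands to foo()(null): the default is used
  def withImplicit: Int = {
    implicit val a: A = new A
    foo()                     // expands to foo()(a): the implicit wins
  }
}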
som-snytt/dotty
tests/pos/i576.scala
Scala
apache-2.0
321
package util import java.io.{ File, FileNotFoundException } import java.nio.file._ import java.nio.file.attribute.BasicFileAttributes import com.google.common.hash.Hashing import com.google.common.io import domain.FileChecksum import domain.Types.FolderContent import scala.util.Try object FolderUtils { def filesInDir(dir: String): FolderContent = { val dirPath = Paths.get(dir) require(Files.isDirectory(dirPath)) var fileList = List[FileChecksum]() class CustomFileVisitor extends SimpleFileVisitor[Path] { override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { val relPath = dirPath.relativize(file) fileList = FileChecksum(relPath.toString, file.toAbsolutePath.toString, file.getFileName.toString, fileChecksum(file.toAbsolutePath.toString)) :: fileList super.visitFile(file, attrs) } } Files.walkFileTree(dirPath, new CustomFileVisitor) fileList.filterNot { case FileChecksum(_, path, _, _) => Files.isDirectory(Paths.get(path)) } } def fileChecksum(filePath: String, retries: Int = 3): String = { val result = Try(io.Files.hash(new File(filePath), Hashing.md5()).toString) recover { case e: FileNotFoundException => if (retries > 0) { Thread.sleep(50) fileChecksum(filePath, retries = retries - 1) } else { throw e } } result.getOrElse("") } def remoteFilesMissingLocallyByRelPath(localFolder: FolderContent, remoteFolder: FolderContent): FolderContent = { remoteFolder.filterNot { remoteFC => localFolder.exists(_.relPath == remoteFC.relPath) } } }
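// Hypothetical usage (not part of the original file): compare a local folder
// against another listing using the helpers above. The paths are illustrative
// and must exist when this runs.
object FolderUtilsExample extends App {
  val local = FolderUtils.filesInDir("/tmp/sync/local")
  val remote = FolderUtils.filesInDir("/tmp/sync/remote") // stand-in for a remote FolderContent
  val missing = FolderUtils.remoteFilesMissingLocallyByRelPath(local, remote)
  missing.foreach {
    case FileChecksum(relPath, _, name, checksum) =>
      println(s"missing locally: $relPath ($name, md5=$checksum)")
  }
}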
felixbr/akka-file-sync
src/main/scala/util/FolderUtils.scala
Scala
mit
1,632
package io.prediction.examples.friendrecommendation import io.prediction.controller._ class FriendRecommendationDataSourceParams( val itemFilePath: String, val userKeywordFilePath: String, val userActionFilePath: String, val trainingRecordFilePath: String ) extends Params
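// Hypothetical instantiation (not part of the original file); the file paths
// are illustrative stand-ins for whatever input files this example expects.
object FriendRecommendationParamsExample {
  val params = new FriendRecommendationDataSourceParams(
    itemFilePath = "data/item.txt",
    userKeywordFilePath = "data/user_key_word.txt",
    userActionFilePath = "data/user_action.txt",
    trainingRecordFilePath = "data/rec_log_train.txt"
  )
}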
ch33hau/PredictionIO
examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala
Scala
apache-2.0
283