code | repo_name | path | language | license | size
stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M
---|---|---|---|---|---|
package br.gov.lexml.parser.pl.util
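// Maps XML/HTML character entity names to the Unicode characters they represent.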
object Entities {
val entities = Map[String,Char](
"aacute " -> 'á',
"Aacute " -> 'Á',
"abreve " -> 'ă',
"Abreve " -> 'Ă',
"ac" -> '∾',
"acd " -> '∿',
"acirc " -> 'â',
"Acirc " -> 'Â',
"acute" -> '´',
"acy " -> 'а',
"Acy " -> 'А',
"add " -> '+',
"aelig " -> 'æ',
"AElig " -> 'Æ',
"agrave " -> 'à',
"Agrave " -> 'À',
"alefsym" -> 'ℵ',
"aleph " -> 'ℵ',
"alpha " -> 'α',
"Alpha " -> 'Α',
"amacr " -> 'ā',
"Amacr " -> 'Ā',
"amalg " -> '⨿',
"amp" -> '&',
"AMP " -> '&',
"and" -> '∧',
"And " -> '⩓',
"andand " -> '⩕',
"andd " -> '⩜',
"andslope " -> '⩘',
"andv " -> '⩚',
"ang" -> '∠',
"ange " -> '⦤',
"angle " -> '∠',
"angmsd" -> '∡',
"angmsdaa " -> '⦨',
"angmsdab " -> '⦩',
"angmsdac " -> '⦪',
"angmsdad " -> '⦫',
"angmsdae " -> '⦬',
"angmsdaf " -> '⦭',
"angmsdag " -> '⦮',
"angmsdah " -> '⦯',
"angrt " -> '∟',
"angrtvb " -> '⊾',
"angrtvbd " -> '⦝',
"angsph " -> '∡',
"angst " -> 'Å',
"angzarr " -> '⍼',
"aogon " -> 'ą',
"Aogon " -> 'Ą',
"ap" -> '≈',
"ape" -> '≊',
"apid " -> '≋',
"apos " -> '\\'',
"apos" -> '\\'',
"approx" -> '≈',
"approxeq " -> '≊',
"aring " -> 'å',
"Aring " -> 'Å',
"Assign " -> '≔',
"ast" -> '*',
"asymp" -> '≈',
"asympeq" -> '≍',
"atilde " -> 'ã',
"Atilde " -> 'Ã',
"auml " -> 'ä',
"Auml " -> 'Ä',
"awconint" -> '∲',
"awint " -> '⨑',
"backcong " -> '≌',
"backepsilon " -> '϶',
"backprime " -> '‵',
"backsim " -> '∽',
"backsimeq " -> '⋍',
"Backslash" -> '∖',
"barvee " -> '⊽',
"barwed" -> '⌅',
"Barwed" -> '⌆',
"barwedge " -> '⌅',
"bbrk" -> '⎵',
"bbrktbrk " -> '⎶',
"bcong" -> '≌',
"bcy " -> 'б',
"Bcy " -> 'Б',
"bdquo" -> '„',
"becaus" -> '∵',
"because" -> '∵',
"Because " -> '∵',
"bemptyv " -> '⦰',
"bepsi" -> '϶',
"bernou" -> 'ℬ',
"Bernoullis" -> 'ℬ',
"beta " -> 'β',
"Beta " -> 'Β',
"beth " -> 'ℶ',
"between " -> '≬',
"bigcap " -> '⋂',
"bigcirc " -> '◯',
"bigcup " -> '⋃',
"bigodot " -> '⨀',
"bigoplus " -> '⨁',
"bigotimes " -> '⨂',
"bigsqcup " -> '⨆',
"bigstar " -> '★',
"bigtriangledown " -> '▽',
"bigtriangleup " -> '△',
"biguplus " -> '⨄',
"bigvee " -> '⋁',
"bigwedge " -> '⋀',
"bkarow " -> '⤍',
"blacklozenge " -> '⧫',
"blacksquare" -> '▪',
"blacktriangle " -> '▴',
"blacktriangledown " -> '▾',
"blacktriangleleft " -> '◂',
"blacktriangleright " -> '▸',
"blank " -> '␣',
"blk12 " -> '▒',
"blk14 " -> '░',
"blk34 " -> '▓',
"block " -> '█',
"bnot " -> '⌐',
"bot" -> '⊥',
"bottom" -> '⊥',
"bowtie " -> '⋈',
"boxbox " -> '⧉',
"boxdl " -> '┐',
"boxdL " -> '╕',
"boxDl " -> '╖',
"boxDL " -> '╗',
"boxdr " -> '┌',
"boxdR " -> '╒',
"boxDr " -> '╓',
"boxDR " -> '╔',
"boxh" -> '─',
"boxH " -> '═',
"boxhd " -> '┬',
"boxhD " -> '╥',
"boxHd " -> '╤',
"boxHD " -> '╦',
"boxhu " -> '┴',
"boxhU " -> '╨',
"boxHu " -> '╧',
"boxHU " -> '╩',
"boxminus " -> '⊟',
"boxplus " -> '⊞',
"boxtimes " -> '⊠',
"boxul " -> '┘',
"boxuL " -> '╛',
"boxUl " -> '╜',
"boxUL " -> '╝',
"boxur " -> '└',
"boxuR " -> '╘',
"boxUr " -> '╙',
"boxUR " -> '╚',
"boxv " -> '│',
"boxV " -> '║',
"boxvh " -> '┼',
"boxvH " -> '╪',
"boxVh " -> '╫',
"boxVH " -> '╬',
"boxvl " -> '┤',
"boxvL " -> '╡',
"boxVl " -> '╢',
"boxVL " -> '╣',
"boxvr " -> '├',
"boxvR " -> '╞',
"boxVr " -> '╟',
"boxVR " -> '╠',
"bprime" -> '‵',
"breve" -> '˘',
"Breve " -> '˘',
"brvbar " -> '¦',
"Bscr " -> 'ℬ',
"bsemi " -> '⁏',
"bsim" -> '∽',
"bsime" -> '⋍',
"bsol " -> '\\\\',
"bsolb " -> '⧅',
"bull" -> '•',
"bullet " -> '•',
"bump" -> '≎',
"bumpe" -> '≏',
"Bumpeq " -> '≎',
"cacute " -> 'ć',
"Cacute " -> 'Ć',
"cap " -> '∩',
"Cap " -> '⋒',
"capand " -> '⩄',
"capbrcup " -> '⩉',
"capcap " -> '⩋',
"capcup " -> '⩇',
"capdot " -> '⩀',
"CapitalDifferentialD" -> 'ⅅ',
"caret " -> '⁁',
"caron" -> 'ˇ',
"Cayleys " -> 'ℭ',
"ccaps " -> '⩍',
"ccaron " -> 'č',
"Ccaron " -> 'Č',
"ccedil " -> 'ç',
"Ccedil " -> 'Ç',
"ccirc " -> 'ĉ',
"Ccirc " -> 'Ĉ',
"Cconint " -> '∰',
"ccups " -> '⩌',
"cdot " -> 'ċ',
"Cdot " -> 'Ċ',
"cedil" -> '¸',
"Cedilla " -> '¸',
"cemptyv " -> '⦲',
"cent " -> '¢',
"centerdot" -> '·',
"CenterDot " -> '·',
"Cfr" -> 'ℭ',
"chcy " -> 'ч',
"CHcy " -> 'Ч',
"check" -> '✓',
"checkmark " -> '✓',
"chi " -> 'χ',
"Chi " -> 'Χ',
"cir " -> '○',
"circ " -> 'ˆ',
"circeq " -> '≗',
"circlearrowleft " -> '↺',
"circlearrowright " -> '↻',
"circledast " -> '⊛',
"circledcirc " -> '⊚',
"circleddash " -> '⊝',
"CircleDot " -> '⊙',
"circledR" -> '®',
"circledS " -> 'Ⓢ',
"CircleMinus " -> '⊖',
"CirclePlus " -> '⊕',
"CircleTimes " -> '⊗',
"cire" -> '≗',
"cirE " -> '⧃',
"cirfnint " -> '⨐',
"cirscir " -> '⧂',
"ClockwiseContourIntegral " -> '∲',
"CloseCurlyDoubleQuote " -> '”',
"CloseCurlyQuote " -> '’',
"clubs" -> '♣',
"clubsuit " -> '♣',
"colon " -> ':',
"Colon" -> '∷',
"colone" -> '≔',
"coloneq" -> '≔',
"comma " -> ',',
"commat " -> '@',
"comp" -> '∁',
"compfn" -> '∘',
"complement " -> '∁',
"complexes " -> 'ℂ',
"cong" -> '≅',
"Congruent " -> '≡',
"conint" -> '∮',
"Conint" -> '∯',
"ContourIntegral " -> '∮',
"Copf" -> 'ℂ',
"coprod" -> '∐',
"Coproduct " -> '∐',
"copy" -> '©',
"COPY " -> '©',
"copysr " -> '℗',
"CounterClockwiseContourIntegral " -> '∲',
"crarr " -> '↵',
"cross " -> '✗',
"Cross " -> '⨯',
"ctdot " -> '⋯',
"cudarrl " -> '⤸',
"cudarrr " -> '⤵',
"cuepr" -> '⋞',
"cuesc" -> '⋟',
"cularr" -> '↶',
"cularrp " -> '⤽',
"cup " -> '∪',
"Cup " -> '⋓',
"cupbrcap " -> '⩈',
"cupcap " -> '⩆',
"CupCap " -> '≍',
"cupcup " -> '⩊',
"cupdot " -> '⊍',
"cupor " -> '⩅',
"curarr" -> '↷',
"curarrm " -> '⤼',
"curlyeqprec " -> '⋞',
"curlyeqsucc " -> '⋟',
"curlyvee " -> '⋎',
"curlywedge " -> '⋏',
"curren " -> '¤',
"curvearrowleft " -> '↶',
"curvearrowright " -> '↷',
"cuvee" -> '⋎',
"cuwed" -> '⋏',
"cwconint" -> '∲',
"cwint " -> '∱',
"cylcty " -> '⌭',
"dagger " -> '†',
"Dagger" -> '‡',
"daleth " -> 'ℸ',
"darr" -> '↓',
"dArr" -> '⇓',
"Darr " -> '↡',
"dash " -> '‐',
"dashv" -> '⊣',
"dbkarow " -> '⤏',
"dblac" -> '˝',
"dcaron " -> 'ď',
"Dcaron " -> 'Ď',
"dcy " -> 'д',
"Dcy " -> 'Д',
"dd " -> 'ⅆ',
"DD " -> 'ⅅ',
"ddagger " -> '‡',
"ddarr" -> '⇉',
"DDotrahd " -> '⤑',
"deg " -> '°',
"Del " -> '∇',
"delta " -> 'δ',
"Delta " -> 'Δ',
"demptyv " -> '⦱',
"dfisht " -> '⥿',
"dHar " -> '⥥',
"dharl" -> '⇃',
"dharr" -> '⇂',
"DiacriticalAcute " -> '´',
"DiacriticalDot " -> '˙',
"DiacriticalDoubleAcute " -> '˝',
"DiacriticalGrave " -> '`',
"DiacriticalTilde " -> '˜',
"diam" -> '⋄',
"diamond" -> '⋄',
"Diamond " -> '⋄',
"diamondsuit " -> '♦',
"diams" -> '♦',
"die" -> '¨',
"DifferentialD" -> 'ⅆ',
"digamma " -> 'ϝ',
"disin " -> '⋲',
"divide " -> '÷',
"divideontimes " -> '⋇',
"divonx" -> '⋇',
"djcy " -> 'ђ',
"DJcy " -> 'Ђ',
"dlcorn" -> '⌞',
"dlcorner " -> '⌞',
"dlcrop " -> '⌍',
"dollar " -> '$',
"dot" -> '˙',
"Dot" -> '¨',
"DotDot " -> '⃜',
"doteq " -> '≐',
"doteqdot " -> '≑',
"DotEqual" -> '≐',
"dotminus " -> '∸',
"dotplus " -> '∔',
"dotsquare " -> '⊡',
"doublebarwedge " -> '⌆',
"DoubleContourIntegral " -> '∯',
"DoubleDot" -> '¨',
"DoubleDownArrow " -> '⇓',
"DoubleLeftArrow " -> '⇐',
"DoubleLeftRightArrow" -> '⇔',
"DoubleLongLeftArrow " -> '⟸',
"DoubleLongLeftRightArrow " -> '⟺',
"DoubleLongRightArrow " -> '⟹',
"DoubleRightArrow " -> '⇒',
"DoubleRightTee " -> '⊨',
"DoubleUpArrow " -> '⇑',
"DoubleUpDownArrow " -> '⇕',
"DoubleVerticalBar" -> '∥',
"downarrow" -> '↓',
"Downarrow" -> '⇓',
"DownArrow" -> '↓',
"DownArrowBar " -> '⤓',
"DownArrowUpArrow " -> '⇵',
"DownBreve " -> '̑',
"downdownarrows " -> '⇉',
"downharpoonleft " -> '⇃',
"downharpoonright " -> '⇂',
"DownLeftRightVector " -> '⥐',
"DownLeftTeeVector " -> '⥞',
"DownLeftVector " -> '↽',
"DownLeftVectorBar " -> '⥖',
"DownRightTeeVector " -> '⥟',
"DownRightVector " -> '⇁',
"DownRightVectorBar " -> '⥗',
"DownTee " -> '⊤',
"DownTeeArrow" -> '↧',
"drbkarow " -> '⤐',
"drcorn" -> '⌟',
"drcorner " -> '⌟',
"drcrop " -> '⌌',
"dscy " -> 'ѕ',
"DScy " -> 'Ѕ',
"dsol " -> '⧶',
"dstrok " -> 'đ',
"Dstrok " -> 'Đ',
"dtdot " -> '⋱',
"dtri" -> '▿',
"dtrif" -> '▾',
"duarr" -> '⇵',
"duhar" -> '⥯',
"dwangle " -> '⦦',
"dzcy " -> 'џ',
"DZcy " -> 'Џ',
"dzigrarr " -> '⟿',
"eacute " -> 'é',
"Eacute " -> 'É',
"ecaron " -> 'ě',
"Ecaron " -> 'Ě',
"ecir" -> '≖',
"ecirc " -> 'ê',
"Ecirc " -> 'Ê',
"ecolon" -> '≕',
"ecy " -> 'э',
"Ecy " -> 'Э',
"edot" -> '≑',
"edot " -> 'ė',
"Edot " -> 'Ė',
"ee " -> 'ⅇ',
"efdot" -> '≒',
"egrave " -> 'è',
"Egrave " -> 'È',
"Element" -> '∈',
"elinters " -> '⏧',
"ell " -> 'ℓ',
"emacr " -> 'ē',
"Emacr " -> 'Ē',
"empty" -> '∅',
"emptyset" -> '∅',
"EmptySmallSquare " -> '◻',
"emptyv" -> '∅',
"EmptyVerySmallSquare " -> '▫',
"emsp " -> ' ',
"emsp13 " -> ' ',
"emsp14 " -> ' ',
"eng " -> 'ŋ',
"ENG " -> 'Ŋ',
"ensp " -> ' ',
"eogon " -> 'ę',
"Eogon " -> 'Ę',
"epar " -> '⋕',
"eparsl " -> '⧣',
"epsi" -> 'ϵ',
"epsilon" -> 'ε',
"Epsilon " -> 'Ε',
"epsiv" -> 'ε',
"eqcirc " -> '≖',
"eqcolon " -> '≕',
"eqsim " -> '≂',
"equal " -> '=',
"EqualTilde" -> '≂',
"equest" -> '≟',
"Equilibrium" -> '⇌',
"equiv" -> '≡',
"eqvparsl " -> '⧥',
"erarr " -> '⥱',
"erdot" -> '≓',
"escr " -> 'ℯ',
"Escr" -> 'ℰ',
"esdot" -> '≐',
"esim" -> '≂',
"eta " -> 'η',
"Eta " -> 'Η',
"eth " -> 'ð',
"ETH " -> 'Ð',
"euml " -> 'ë',
"Euml " -> 'Ë',
"euro " -> '€',
"excl " -> '!',
"exist" -> '∃',
"Exists " -> '∃',
"expectation " -> 'ℰ',
"exponentiale" -> 'ⅇ',
"ExponentialE" -> 'ⅇ',
"fallingdotseq " -> '≒',
"fcy " -> 'ф',
"Fcy " -> 'Ф',
"female " -> '♀',
"ffilig " -> 'ffi',
"fflig " -> 'ff',
"ffllig " -> 'ffl',
"filig " -> 'fi',
"FilledSmallSquare " -> '◼',
"FilledVerySmallSquare " -> '▪',
"flat " -> '♭',
"fllig " -> 'fl',
"fltns " -> '▱',
"fnof " -> 'ƒ',
"forall" -> '∀',
"ForAll " -> '∀',
"fork" -> '⋔',
"Fouriertrf " -> 'ℱ',
"fpartint " -> '⨍',
"frac12" -> '½',
"frac13 " -> '⅓',
"frac14 " -> '¼',
"frac15 " -> '⅕',
"frac16 " -> '⅙',
"frac18 " -> '⅛',
"frac23 " -> '⅔',
"frac25 " -> '⅖',
"frac34 " -> '¾',
"frac35 " -> '⅗',
"frac38 " -> '⅜',
"frac45 " -> '⅘',
"frac56 " -> '⅚',
"frac58 " -> '⅝',
"frac78 " -> '⅞',
"frasl " -> '⁄',
"frown" -> '⌢',
"Fscr" -> 'ℱ',
"gacute " -> 'ǵ',
"gammad" -> 'ϝ',
"Gammad " -> 'Ϝ',
"gamma " -> 'γ',
"Gamma " -> 'Γ',
"gbreve " -> 'ğ',
"Gbreve " -> 'Ğ',
"Gcedil " -> 'Ģ',
"gcirc " -> 'ĝ',
"Gcirc " -> 'Ĝ',
"gcy " -> 'г',
"Gcy " -> 'Г',
"gdot " -> 'ġ',
"Gdot " -> 'Ġ',
"ge" -> '≥',
"gE" -> '≧',
"gel" -> '⋛',
"geq " -> '≥',
"geqq " -> '≧',
"gg " -> '≫',
"Gg" -> '⋙',
"ggg " -> '⋙',
"gimel " -> 'ℷ',
"gjcy " -> 'ѓ',
"GJcy " -> 'Ѓ',
"gl" -> '≷',
"gnE" -> '≩',
"gneqq " -> '≩',
"gnsim " -> '⋧',
"grave" -> '`',
"GreaterEqual" -> '≥',
"GreaterEqualLess " -> '⋛',
"GreaterFullEqual" -> '≧',
"GreaterLess " -> '≷',
"GreaterTilde" -> '≳',
"gscr " -> 'ℊ',
"gsim" -> '≳',
"gt" -> '>',
"Gt" -> '≫',
"GT " -> '>',
"gtdot" -> '⋗',
"gtlPar " -> '⦕',
"gtrarr " -> '⥸',
"gtrdot " -> '⋗',
"gtreqless" -> '⋛',
"gtrless" -> '≷',
"gtrsimsim " -> '≳',
"Hacek " -> 'ˇ',
"hairsp" -> ' ',
"half " -> '½',
"hamilt" -> 'ℋ',
"hardcy " -> 'ъ',
"HARDcy " -> 'Ъ',
"harr" -> '↔',
"hArr" -> '⇔',
"harrcir " -> '⥈',
"harrw" -> '↭',
"Hat " -> '^',
"hbar" -> 'ℏ',
"hcirc " -> 'ĥ',
"Hcirc " -> 'Ĥ',
"hearts" -> '♥',
"heartsuit " -> '♥',
"hellip" -> '…',
"hercon " -> '⊹',
"Hfr" -> 'ℌ',
"HilbertSpace" -> 'ℋ',
"hksearow " -> '⤥',
"hkswarow " -> '⤦',
"hoarr " -> '⇿',
"homtht " -> '∻',
"hookleftarrow " -> '↩',
"hookrightarrow " -> '↪',
"Hopf " -> 'ℍ',
"horbar " -> '―',
"HorizontalLine " -> '─',
"Hscr " -> 'ℋ',
"hslash " -> 'ℏ',
"hstrok " -> 'ħ',
"Hstrok " -> 'Ħ',
"HumpDownHump" -> '≎',
"Humpeq " -> '≏',
"HumpEqual" -> '≏',
"hybull " -> '⁃',
"hyphen" -> '‐',
"iacute " -> 'í',
"Iacute " -> 'Í',
"icirc " -> 'î',
"Icirc " -> 'Î',
"icy " -> 'и',
"Icy " -> 'И',
"Idot " -> 'İ',
"iecy " -> 'е',
"IEcy " -> 'Е',
"iexcl " -> '¡',
"iff " -> '⇔',
"Ifr " -> 'ℑ',
"igrave " -> 'ì',
"Igrave " -> 'Ì',
"ii " -> 'ⅈ',
"iiiint " -> '⨌',
"iiint " -> '∭',
"iinfin " -> '⧜',
"iiota " -> '℩',
"ijlig " -> 'ij',
"IJlig " -> 'IJ',
"Im" -> 'ℑ',
"imacr " -> 'ī',
"Imacr " -> 'Ī',
"image" -> 'ℑ',
"ImaginaryI" -> 'ⅈ',
"imagline " -> 'ℐ',
"imagpart" -> 'ℑ',
"imath" -> 'ı',
"imof " -> '⊷',
"imped " -> 'Ƶ',
"Implies" -> '⇒',
"in " -> '∈',
"incare " -> '℅',
"infin " -> '∞',
"infintie " -> '⧝',
"inodot " -> 'ı',
"int" -> '∫',
"intcal" -> '⊺',
"integers" -> 'ℤ',
"Integral " -> '∫',
"intercal " -> '⊺',
"Intersection" -> '⋂',
"intlarhk " -> '⨗',
"intprod " -> '⨼',
"iocy " -> 'ё',
"IOcy " -> 'Ё',
"iogon " -> 'į',
"Iogon " -> 'Į',
"iota " -> 'ι',
"Iota " -> 'Ι',
"iprod" -> '⨼',
"iquest " -> '¿',
"Iscr" -> 'ℐ',
"isin" -> '∈',
"isindot " -> '⋵',
"isinE " -> '⋹',
"isins " -> '⋴',
"isinsv " -> '⋳',
"isinv" -> '∈',
"itilde " -> 'ĩ',
"Itilde " -> 'Ĩ',
"iukcy " -> 'і',
"Iukcy " -> 'І',
"iuml " -> 'ï',
"Iuml " -> 'Ï',
"jcirc " -> 'ĵ',
"Jcirc " -> 'Ĵ',
"jcy " -> 'й',
"Jcy " -> 'Й',
"jmath " -> 'ȷ',
"jsercy " -> 'ј',
"Jsercy " -> 'Ј',
"jukcy " -> 'є',
"Jukcy " -> 'Є',
"kappav" -> 'ϰ',
"kappa " -> 'κ',
"Kappa " -> 'Κ',
"kcedil " -> 'ķ',
"Kcedil " -> 'Ķ',
"kcy " -> 'к',
"Kcy " -> 'К',
"kgreen " -> 'ĸ',
"khcy " -> 'х',
"KHcy " -> 'Х',
"kjcy " -> 'ќ',
"KJcy " -> 'Ќ',
"lAarr" -> '⇚',
"lacute " -> 'ĺ',
"Lacute " -> 'Ĺ',
"laemptyv " -> '⦳',
"lagram" -> 'ℒ',
"lambda " -> 'λ',
"Lambda " -> 'Λ',
"lang" -> '⟨',
"Lang " -> '⟪',
"langd " -> '⦑',
"langle " -> '⟨',
"Laplacetrf " -> 'ℒ',
"laquo " -> '«',
"larr" -> '←',
"lArr" -> '⇐',
"Larr" -> '↞',
"larrb" -> '⇤',
"larrbfs " -> '⤟',
"larrfs " -> '⤝',
"larrhk" -> '↩',
"larrlp" -> '↫',
"larrpl " -> '⤹',
"larrsim " -> '⥳',
"larrtl" -> '↢',
"latail " -> '⤙',
"lAtail " -> '⤛',
"lbarr " -> '⤌',
"lBarr " -> '⤎',
"lbbrk " -> '❲',
"lbrace " -> '{',
"lbrack " -> '[',
"lbrke " -> '⦋',
"lbrksld " -> '⦏',
"lbrkslu " -> '⦍',
"lcaron " -> 'ľ',
"Lcaron " -> 'Ľ',
"lcedil " -> 'ļ',
"Lcedil " -> 'Ļ',
"lceil" -> '⌈',
"lcub" -> '{',
"lcy " -> 'л',
"Lcy " -> 'Л',
"ldca " -> '⤶',
"ldquo" -> '“',
"ldquor " -> '„',
"ldrdhar " -> '⥧',
"ldrushar " -> '⥋',
"ldsh " -> '↲',
"le" -> '≤',
"lE" -> '≦',
"LeftAngleBracket" -> '⟨',
"leftarrow" -> '←',
"Leftarrow" -> '⇐',
"LeftArrow" -> '←',
"LeftArrowBar " -> '⇤',
"LeftArrowRightArrow " -> '⇆',
"leftarrowtail " -> '↢',
"LeftCeiling " -> '⌈',
"LeftDoubleBracket " -> '⟦',
"LeftDownTeeVector " -> '⥡',
"LeftDownVector" -> '⇃',
"LeftDownVectorBar " -> '⥙',
"LeftFloor " -> '⌊',
"leftharpoondown" -> '↽',
"leftharpoonup " -> '↼',
"leftleftarrows " -> '⇇',
"leftrightarrow" -> '↔',
"Leftrightarrow" -> '⇔',
"LeftRightArrow " -> '↔',
"leftrightarrows" -> '⇆',
"leftrightharpoons " -> '⇋',
"leftrightsquigarrow " -> '↭',
"LeftRightVector " -> '⥎',
"LeftTee " -> '⊣',
"LeftTeeArrow" -> '↤',
"LeftTeeVector " -> '⥚',
"leftthreetimes " -> '⋋',
"LeftTriangle " -> '⊲',
"LeftTriangleBar " -> '⧏',
"LeftTriangleEqual " -> '⊴',
"LeftUpDownVector " -> '⥑',
"LeftUpTeeVector " -> '⥠',
"LeftUpVector " -> '↿',
"LeftUpVectorBar " -> '⥘',
"LeftVector" -> '↼',
"LeftVectorBar " -> '⥒',
"leg" -> '⋚',
"leq " -> '≤',
"leqq " -> '≦',
"lessdot " -> '⋖',
"lesseqgtr " -> '⋚',
"LessEqual" -> '≤',
"LessEqualGreater" -> '⋚',
"LessFullEqual" -> '≦',
"LessGreater " -> '≶',
"lessgtr" -> '≶',
"lesssim " -> '≲',
"LessTilde" -> '≲',
"lfisht " -> '⥼',
"lfloor" -> '⌊',
"lg" -> '≶',
"lHar " -> '⥢',
"lhard" -> '↽',
"lharu" -> '↼',
"lharul " -> '⥪',
"lhblk " -> '▄',
"ljcy " -> 'љ',
"LJcy " -> 'Љ',
"ll " -> '≪',
"Ll" -> '⋘',
"llarr" -> '⇇',
"Lleftarrow " -> '⇚',
"llhard " -> '⥫',
"lll " -> '⋘',
"lltri " -> '◺',
"lmidot " -> 'ŀ',
"Lmidot " -> 'Ŀ',
"lmoust" -> '⎰',
"lmoustache " -> '⎰',
"lnE" -> '≨',
"lneqq " -> '≨',
"lnsim " -> '⋦',
"loang " -> '⟬',
"loarr " -> '⇽',
"lobrk" -> '⟦',
"longleftarrow" -> '⟵',
"Longleftarrow" -> '⟸',
"LongLeftArrow " -> '⟵',
"longleftrightarrow" -> '⟷',
"Longleftrightarrow" -> '⟺',
"LongLeftRightArrow " -> '⟷',
"longmapsto " -> '⟼',
"longrightarrow" -> '⟶',
"Longrightarrow" -> '⟹',
"LongRightArrow " -> '⟶',
"looparrowleft " -> '↫',
"looparrowright " -> '↬',
"lopar " -> '⦅',
"loplus " -> '⨭',
"lotimes " -> '⨴',
"lowast " -> '∗',
"lowbar " -> '_',
"LowerLeftArrow " -> '↙',
"LowerRightArrow " -> '↘',
"loz" -> '◊',
"lozenge " -> '◊',
"lozf" -> '⧫',
"lpar " -> '(',
"lparlt " -> '⦓',
"lrarr" -> '⇆',
"lrhar" -> '⇋',
"lrhard " -> '⥫',
"lrtri " -> '⊿',
"lsaquo " -> '‹',
"Lscr" -> 'ℒ',
"lsh" -> '↰',
"Lsh " -> '↰',
"lsim" -> '≲',
"lsqb" -> '[',
"lsquo" -> '‘',
"lsquor " -> '‚',
"lstrok " -> 'ł',
"Lstrok " -> 'Ł',
"lt" -> '<',
"Lt" -> '≪',
"LT " -> '<',
"ltdot" -> '⋖',
"lthree" -> '⋋',
"ltimes " -> '⋉',
"ltlarr " -> '⥶',
"ltri" -> '◃',
"ltrie" -> '⊴',
"ltrif" -> '◂',
"ltrPar " -> '⦖',
"lurdshar " -> '⥊',
"luruhar " -> '⥦',
"macr" -> '¯',
"male " -> '♂',
"malt" -> '✠',
"Maltese " -> '✠',
"map" -> '↦',
"Map " -> '⤅',
"mapsto " -> '↦',
"mapstodown " -> '↧',
"mapstoleft " -> '↤',
"mapstoup " -> '↥',
"marker " -> '▮',
"mcomma " -> '⨩',
"mcy " -> 'м',
"Mcy " -> 'М',
"mdash " -> '—',
"mDDot " -> '∺',
"measuredangle " -> '∡',
"MediumSpace " -> ' ',
"Mellintrf " -> 'ℳ',
"mho " -> '℧',
"micro " -> 'µ',
"mid" -> '∣',
"midast " -> '*',
"middot" -> '·',
"minus " -> '−',
"minusb" -> '⊟',
"minusd" -> '∸',
"minusdu " -> '⨪',
"MinusPlus " -> '∓',
"mldr " -> '…',
"mmap" -> '⊸',
"mnplus" -> '∓',
"models " -> '⊧',
"mp" -> '∓',
"Mscr" -> 'ℳ',
"mstpos " -> '∾',
"multimap " -> '⊸',
"mu " -> 'μ',
"Mu " -> 'Μ',
"nabla" -> '∇',
"nacute " -> 'ń',
"Nacute " -> 'Ń',
"nap" -> '≉',
"napos " -> 'ʼn',
"napprox " -> '≉',
"natur" -> '♮',
"natural " -> '♮',
"naturals " -> 'ℕ',
"nbsp" -> ' ',
"ncap " -> '⩃',
"ncaron " -> 'ň',
"Ncaron " -> 'Ň',
"ncedil " -> 'ņ',
"Ncedil " -> 'Ņ',
"ncong" -> '≇',
"ncup " -> '⩂',
"ncy " -> 'н',
"Ncy " -> 'Н',
"ndash " -> '–',
"ne" -> '≠',
"nearhk " -> '⤤',
"nearr" -> '↗',
"neArr " -> '⇗',
"nearrow " -> '↗',
"nequiv" -> '≢',
"nesear" -> '⤨',
"NestedGreaterGreater" -> '≫',
"NestedLessLess" -> '≪',
"nexist" -> '∄',
"nexists " -> '∄',
"nge" -> '≱',
"ngeq " -> '≱',
"ngsim" -> '≵',
"ngt" -> '≯',
"ngtr " -> '≯',
"nhArr" -> '⇍',
"nharrow" -> '↮',
"ni" -> '∋',
"nis " -> '⋼',
"nisd " -> '⋺',
"niv" -> '∋',
"njcy " -> 'њ',
"NJcy " -> 'Њ',
"nlarr" -> '↚',
"nlArr" -> '⇍',
"nldr " -> '‥',
"nle" -> '≰',
"nleftarrow " -> '↚',
"nLeftArrow " -> '⇍',
"nleftrightarrow " -> '↮',
"nLeftrightarrow " -> '⇍',
"nleq " -> '≰',
"nless " -> '≮',
"nlsim" -> '≴',
"nlt" -> '≮',
"nltri" -> '⋪',
"nltrie" -> '⋬',
"nmid" -> '∤',
"Nopf" -> 'ℕ',
"not " -> '¬',
"NotCongruent " -> '≢',
"NotCupCap " -> '≭',
"NotDoubleVerticalBar" -> '∦',
"NotElement" -> '∉',
"NotEqual " -> '≠',
"NotExists" -> '∄',
"NotGreater" -> '≯',
"NotGreaterEqual" -> '≱',
"NotGreaterTilde " -> '≵',
"notin" -> '∉',
"notinva " -> '∉',
"notinvb " -> '⋷',
"notinvc " -> '⋶',
"NotLeftTriangle " -> '⋪',
"NotLeftTriangleEqual " -> '⋬',
"NotLess" -> '≮',
"NotLessEqual" -> '≰',
"NotLessGreater " -> '≸',
"NotLessTilde " -> '≴',
"notni" -> '∌',
"notniva" -> '∌',
"notnivb " -> '⋾',
"notnivc " -> '⋽',
"NotPrecedes " -> '⊀',
"NotPrecedesSlantEqual " -> '⋠',
"NotReverseElement " -> '∌',
"NotRightTriangle " -> '⋫',
"NotRightTriangleEqual " -> '⋭',
"NotSquareSubsetEqual " -> '⋢',
"NotSquareSupersetEqual " -> '⋣',
"NotSubsetEqual " -> '⊈',
"NotSucceeds " -> '⊁',
"NotSucceedsSlantEqual " -> '⋡',
"NotSupersetEqual " -> '⊉',
"NotTilde " -> '≁',
"NotTildeEqual " -> '≄',
"NotTildeFullEqual " -> '≇',
"NotTildeTilde" -> '≉',
"NotVerticalBar" -> '∤',
"npar" -> '∦',
"nparallel" -> '∦',
"npolint " -> '⨔',
"npr" -> '⊀',
"nprcue" -> '⋠',
"nprec" -> '⊀',
"nrarr" -> '↛',
"nrArr" -> '⇍',
"nrightarrow " -> '↛',
"nRightArrow " -> '⇍',
"nrtri" -> '⋫',
"nrtrie" -> '⋭',
"nsc" -> '⊁',
"nsccue" -> '⋡',
"nshortmid " -> '∤',
"nshortparallel " -> '∦',
"nsim" -> '≁',
"nsime" -> '≄',
"nsimeq" -> '≄',
"nsmid" -> '∤',
"nspar" -> '∦',
"nsqsube" -> '⋢',
"nsqsupe" -> '⋣',
"nsub " -> '⊄',
"nsube" -> '⊈',
"nsubseteq" -> '⊈',
"nsucc" -> '⊁',
"nsup " -> '⊅',
"nsupe" -> '⊉',
"nsupseteq" -> '⊉',
"ntgl" -> '≹',
"ntilde " -> 'ñ',
"Ntilde " -> 'Ñ',
"ntlg" -> '≸',
"ntriangleleft" -> '⋪',
"ntrianglelefteq" -> '⋬',
"ntriangleright" -> '⋫',
"ntrianglerighteq" -> '⋭',
"num " -> '#',
"numero " -> '№',
"numsp " -> ' ',
"nu " -> 'ν',
"Nu " -> 'Ν',
"nvdash " -> '⊬',
"nvDash " -> '⊭',
"nVdash " -> '⊮',
"nVDash " -> '⊯',
"nvHArr " -> '⤄',
"nvinfin " -> '⧞',
"nvlArr " -> '⤂',
"nvrArr " -> '⤃',
"nwarhk " -> '⤣',
"nwarr" -> '↖',
"nwArr " -> '⇖',
"nwarrow " -> '↖',
"nwnear " -> '⤧',
"oacute " -> 'ó',
"Oacute " -> 'Ó',
"oast" -> '⊛',
"ocirc " -> 'ô',
"Ocirc " -> 'Ô',
"ocr" -> '⊚',
"ocy " -> 'о',
"Ocy " -> 'О',
"odash" -> '⊝',
"odblac " -> 'ő',
"Odblac " -> 'Ő',
"odiv " -> '⨸',
"odot" -> '⊙',
"odsold " -> '⦼',
"oelig " -> 'œ',
"OElig " -> 'Œ',
"ofcir " -> '⦿',
"ogon " -> '˛',
"ograve " -> 'ò',
"Ograve " -> 'Ò',
"ogt " -> '⧁',
"ohbar " -> '⦵',
"ohm " -> 'Ω',
"oint" -> '∮',
"olarr" -> '↺',
"olcir " -> '⦾',
"olcross " -> '⦻',
"oline " -> '‾',
"olt " -> '⧀',
"omacr " -> 'ō',
"Omacr " -> 'Ō',
"omega " -> 'ω',
"Omega " -> 'Ω',
"omicron " -> 'ο',
"Omicron " -> 'Ο',
"omid " -> '⦶',
"ominus" -> '⊖',
"opar " -> '⦶',
"OpenCurlyDoubleQuote " -> '“',
"OpenCurlyQuote " -> '‘',
"operp " -> '⦹',
"oplus" -> '⊕',
"or" -> '∨',
"Or " -> '⩔',
"orarr" -> '↻',
"ord " -> '⩝',
"order" -> 'ℴ',
"orderof" -> 'ℴ',
"ordf " -> 'ª',
"ordm " -> 'º',
"origof " -> '⊶',
"oror " -> '⩕',
"orslope " -> '⩗',
"orv " -> '⩛',
"oS" -> 'Ⓢ',
"oscr " -> 'ℴ',
"oslash " -> 'ø',
"Oslash " -> 'Ø',
"osol " -> '⊘',
"otilde " -> 'õ',
"Otilde " -> 'Õ',
"otimes" -> '⊗',
"Otimes " -> '⨷',
"otimesas " -> '⨶',
"ouml " -> 'ö',
"Ouml " -> 'Ö',
"ovbar " -> '⌽',
"OverBar" -> '¯',
"OverBrace " -> '⏞',
"OverBracket " -> '⎴',
"OverParenthesis " -> '⏜',
"par" -> '∥',
"para " -> '¶',
"parallel" -> '∥',
"part" -> '∂',
"PartialD " -> '∂',
"pcy " -> 'п',
"Pcy " -> 'П',
"percnt " -> '%',
"period " -> '.',
"permil " -> '‰',
"perp" -> '⊥',
"pertenk " -> '‱',
"phi " -> 'φ',
"Phi " -> 'Φ',
"phmmat" -> 'ℳ',
"phone " -> '☎',
"pitchfork " -> '⋔',
"piv" -> 'ϖ',
"pi " -> 'π',
"Pi " -> 'Π',
"planck" -> 'ℏ',
"planckh " -> 'ℎ',
"plankv" -> 'ℏ',
"plusacir " -> '⨣',
"plusb" -> '⊞',
"pluscir " -> '⨢',
"plusdo" -> '∔',
"plusdu " -> '⨥',
"PlusMinus " -> '±',
"plusmn" -> '±',
"plussim " -> '⨦',
"plustwo " -> '⨧',
"pm" -> '±',
"PoincarePlane " -> 'ℌ',
"pointint " -> '⨕',
"Popf" -> 'ℙ',
"pound " -> '£',
"pr" -> '≺',
"prcue" -> '≼',
"prec " -> '≺',
"preccurlyeq " -> '≼',
"Precedes" -> '≺',
"PrecedesSlantEqual" -> '≼',
"PrecedesTilde " -> '≾',
"precnsim " -> '⋨',
"precsim" -> '≾',
"prime " -> '′',
"Prime " -> '″',
"primes " -> 'ℙ',
"prnsim" -> '⋨',
"prod" -> '∏',
"Product " -> '∏',
"profalar " -> '⌮',
"profline " -> '⌒',
"profsurf " -> '⌓',
"prop" -> '∝',
"Proportion " -> '∷',
"Proportional" -> '∝',
"propto" -> '∝',
"prsim" -> '≾',
"prurel " -> '⊰',
"psi " -> 'ψ',
"Psi " -> 'Ψ',
"puncsp " -> ' ',
"qint" -> '⨌',
"Qopf " -> 'ℚ',
"qprime " -> '⁗',
"quaternions" -> 'ℍ',
"quatint " -> '⨖',
"quest " -> '?',
"questeq " -> '≟',
"quot" -> '"',
"QUOT " -> '"',
"rAarr" -> '⇛',
"race " -> '⧚',
"racute " -> 'ŕ',
"Racute " -> 'Ŕ',
"radic" -> '√',
"raemptyv " -> '⦳',
"rang" -> '⟩',
"Rang " -> '⟫',
"rangd " -> '⦒',
"range " -> '⦥',
"rangle " -> '⟩',
"raquo " -> '»',
"rarr" -> '→',
"rArr" -> '⇒',
"Rarr" -> '↠',
"rarrap " -> '⥵',
"rarrb" -> '⇤',
"rarrbfs " -> '⤠',
"rarrc " -> '⤳',
"rarrfs " -> '⤞',
"rarrhk" -> '↪',
"rarrlp" -> '↬',
"rarrpl " -> '⥅',
"rarrsim " -> '⥴',
"rarrtl" -> '↣',
"Rarrtl " -> '⤖',
"rarrw" -> '↝',
"ratail " -> '⤚',
"rAtail " -> '⤜',
"ratio " -> '∶',
"rationals" -> 'ℚ',
"rbarr" -> '⤍',
"rBarr" -> '⤏',
"RBarr" -> '⤐',
"rbbrk " -> '❳',
"rbrace " -> '}',
"rbrack " -> ']',
"rbrke " -> '⦌',
"rbrksld " -> '⦎',
"rbrkslu " -> '⦐',
"rcaron " -> 'ř',
"Rcaron " -> 'Ř',
"rcedil " -> 'ŗ',
"Rcedil " -> 'Ŗ',
"rceil" -> '⌉',
"rcub" -> '}',
"rcy " -> 'р',
"Rcy " -> 'Р',
"rdca " -> '⤷',
"rdldhar " -> '⥩',
"rdquo" -> '”',
"rdquor" -> '”',
"rdsh " -> '↳',
"Re" -> 'ℜ',
"real" -> 'ℜ',
"realine " -> 'ℛ',
"realpart" -> 'ℜ',
"reals" -> 'ℝ',
"rect " -> '▭',
"reg" -> '®',
"REG " -> '®',
"ReverseElement" -> '∋',
"ReverseEquilibrium" -> '⇋',
"ReverseUpEquilibrium " -> '⥯',
"rfisht " -> '⥽',
"rfloor" -> '⌋',
"Rfr " -> 'ℜ',
"rHar " -> '⥤',
"rhard" -> '⇁',
"rharu" -> '⇀',
"rharul " -> '⥬',
"rhov" -> 'ϱ',
"rho " -> 'ρ',
"Rho " -> 'Ρ',
"RightAngleBracket" -> '⟩',
"rightarrow" -> '→',
"Rightarrow" -> '⇒',
"RightArrow" -> '→',
"RightArrowBar " -> '⇤',
"RightArrowLeftArrow " -> '⇄',
"rightarrowtail " -> '↣',
"RightCeiling " -> '⌉',
"RightDoubleBracket " -> '⟧',
"RightDownTeeVector " -> '⥝',
"RightDownVector" -> '⇂',
"RightDownVectorBar " -> '⥕',
"RightFloor " -> '⌋',
"rightharpoondown" -> '⇁',
"rightharpoonup " -> '⇀',
"rightleftarrows" -> '⇄',
"rightleftharpoons " -> '⇌',
"rightrightarrows " -> '⇉',
"rightsquigarrow " -> '↝',
"RightTee " -> '⊢',
"RightTeeArrow" -> '↦',
"RightTeeVector " -> '⥛',
"rightthreetimes " -> '⋌',
"RightTriangle; " -> '⊳',
"RightTriangleBar " -> '⧐',
"RightTriangleEqual " -> '⊵',
"RightUpDownVector " -> '⥏',
"RightUpTeeVector " -> '⥜',
"RightUpVector " -> '↾',
"RightUpVectorBar " -> '⥔',
"RightVector" -> '⇀',
"RightVectorBar " -> '⥓',
"ring " -> '˚',
"risingdotseq " -> '≓',
"rlarr" -> '⇄',
"rlhar" -> '⇌',
"rmoust" -> '⎱',
"rmoustache " -> '⎱',
"roang " -> '⟭',
"roarr " -> '⇾',
"robrk" -> '⟧',
"ropar " -> '⦆',
"Ropf " -> 'ℝ',
"roplus " -> '⨮',
"rotimes " -> '⨵',
"RoundImplies " -> '⥰',
"rpar " -> ')',
"rpargt " -> '⦔',
"rppolint " -> '⨒',
"rrarr" -> '⇉',
"Rrightarrow " -> '⇛',
"rsaquo " -> '›',
"Rscr" -> 'ℛ',
"rsh" -> '↱',
"Rsh " -> '↱',
"rsqb" -> ']',
"rsquo" -> '’',
"rsquor" -> '’',
"rthree" -> '⋌',
"rtimes " -> '⋊',
"rtri" -> '▹',
"rtrie" -> '⊵',
"rtrif" -> '▸',
"rtriltri " -> '⧎',
"RuleDelayed " -> '⧴',
"ruluhar " -> '⥨',
"rx " -> '℞',
"sacute " -> 'ś',
"Sacute " -> 'Ś',
"sbquo" -> '‚',
"sc" -> '≻',
"scaron " -> 'š',
"Scaron " -> 'Š',
"sccue" -> '≽',
"scedil " -> 'ş',
"Scedil " -> 'Ş',
"scirc " -> 'ŝ',
"Scirc " -> 'Ŝ',
"scnsim" -> '⋩',
"scpolint " -> '⨓',
"scy " -> 'с',
"Scy " -> 'С',
"sdot " -> '⋅',
"sdotb" -> '⊡',
"searhk" -> '⤥',
"searr" -> '↘',
"seArr " -> '⇘',
"searrow" -> '↘',
"sect " -> '§',
"semi " -> ';',
"seswar" -> '⤩',
"setminus" -> '∖',
"setmn" -> '∖',
"sext " -> '✶',
"sfrown " -> '⌢',
"sharp " -> '♯',
"shchcy " -> 'щ',
"SHCHcy " -> 'Щ',
"shcy " -> 'ш',
"SHcy " -> 'Ш',
"ShortDownArrow " -> '↓',
"ShortLeftArrow " -> '←',
"shortmid " -> '∣',
"shortparallel " -> '∥',
"ShortRightArrow " -> '→',
"ShortUpArrow " -> '↑',
"sigmaf" -> 'ς',
"sigmav" -> 'ς',
"sigma " -> 'σ',
"Sigma " -> 'Σ',
"sim" -> '∼',
"sime" -> '≃',
"simeq " -> '≃',
"simne " -> '≆',
"simplus " -> '⨤',
"simrarr " -> '⥲',
"slarr" -> '←',
"SmallCircle " -> '∘',
"smallsetminus " -> '∖',
"smashp " -> '⨳',
"smeparsl " -> '⧤',
"smid" -> '∣',
"smile" -> '⌣',
"softcy " -> 'ь',
"SOFTcy " -> 'Ь',
"sol " -> '/',
"solb " -> '⧄',
"solbar " -> '⌿',
"spades" -> '♠',
"spadesuit " -> '♠',
"spar" -> '∥',
"sqcap" -> '⊓',
"sqcup" -> '⊔',
"Sqrt " -> '√',
"sqsub" -> '⊏',
"sqsube" -> '⊑',
"sqsubset " -> '⊏',
"sqsubseteq " -> '⊑',
"sqsup" -> '⊐',
"sqsupe" -> '⊒',
"sqsupset " -> '⊐',
"sqsupseteq " -> '⊒',
"squ" -> '□',
"square" -> '□',
"Square " -> '□',
"squaref" -> '▪',
"SquareIntersection " -> '⊓',
"SquareSubset" -> '⊏',
"SquareSubsetEqual" -> '⊑',
"SquareSuperset" -> '⊐',
"SquareSupersetEqual" -> '⊒',
"SquareUnion " -> '⊔',
"squf" -> '▪',
"srarr" -> '→',
"ssetmn" -> '∖',
"ssmile " -> '⌣',
"sstarf" -> '⋆',
"star " -> '☆',
"Star " -> '⋆',
"starf" -> '★',
"straightepsilon " -> 'ϵ',
"straightphi " -> 'ϕ',
"strns " -> '¯',
"sub" -> '⊂',
"Sub" -> '⋐',
"sube" -> '⊆',
"subne" -> '⊊',
"subrarr " -> '⥹',
"subset " -> '⊂',
"Subset " -> '⋐',
"subseteq " -> '⊆',
"SubsetEqual" -> '⊆',
"subsetneq " -> '⊊',
"succ " -> '≻',
"succcurlyeq " -> '≽',
"Succeeds" -> '≻',
"SucceedsSlantEqual" -> '≽',
"succnsim " -> '⋩',
"SuchThat " -> '∋',
"sum" -> '∑',
"Sum " -> '∑',
"sung " -> '♪',
"sup" -> '⊃',
"Sup" -> '⋑',
"sup1 " -> '¹',
"sup2 " -> '²',
"sup3 " -> '³',
"supe" -> '⊇',
"Superset " -> '⊃',
"SupersetEqual " -> '⊇',
"supne" -> '⊋',
"suprarr " -> '⥻',
"supset" -> '⊃',
"Supset " -> '⋑',
"supseteq" -> '⊇',
"supsetneq " -> '⊋',
"swarhk" -> '⤦',
"swarr" -> '↙',
"swArr " -> '⇙',
"swarrow" -> '↙',
"swnwar " -> '⤪',
"szlig " -> 'ß',
"target " -> '⌖',
"tau " -> 'τ',
"Tau " -> 'Τ',
"tbrk" -> '⎴',
"tcaron " -> 'ť',
"Tcaron " -> 'Ť',
"tcedil " -> 'ţ',
"Tcedil " -> 'Ţ',
"tcy " -> 'т',
"Tcy " -> 'Т',
"tdot" -> '⃛',
"telrec " -> '⌕',
"there4" -> '∴',
"therefore" -> '∴',
"Therefore " -> '∴',
"thetasym" -> 'ϑ',
"thetav" -> 'ϑ',
"theta " -> 'θ',
"Theta " -> 'Θ',
"thickapprox " -> '≈',
"thicksim " -> '∼',
"thinsp" -> ' ',
"ThinSpace " -> ' ',
"thkap" -> '≈',
"thksim" -> '∼',
"thorn " -> 'þ',
"THORN " -> 'Þ',
"tilde" -> '˜',
"Tilde" -> '∼',
"TildeEqual" -> '≃',
"TildeFullEqual " -> '≅',
"TildeTilde" -> '≈',
"times " -> '×',
"timesb" -> '⊠',
"timesbar " -> '⨱',
"timesd " -> '⨰',
"tint" -> '∭',
"toea " -> '⤨',
"top" -> '⊤',
"topbot " -> '⌶',
"tosa " -> '⤩',
"tprime " -> '‴',
"trade " -> '™',
"triangle " -> '▵',
"triangledown " -> '▿',
"triangleleft " -> '◃',
"trianglelefteq" -> '⊴',
"triangleq " -> '≜',
"triangleright " -> '▹',
"trianglerighteq" -> '⊵',
"tridot " -> '◬',
"trie" -> '≜',
"triminus " -> '⨺',
"TripleDot " -> '⃛',
"triplus " -> '⨹',
"trisb " -> '⧍',
"tritime " -> '⨻',
"trpezium " -> '⏢',
"tscy " -> 'ц',
"TScy " -> 'Ц',
"tshcy " -> 'ћ',
"TSHcy " -> 'Ћ',
"tstrok " -> 'ŧ',
"Tstrok " -> 'Ŧ',
"twixt" -> '≬',
"twoheadleftarrow " -> '↞',
"twoheadrightarrow " -> '↠',
"uacute " -> 'ú',
"Uacute " -> 'Ú',
"uarr" -> '↑',
"uArr" -> '⇑',
"Uarr " -> '↟',
"Uarrocir " -> '⥉',
"ubrcy " -> 'ў',
"Ubrcy " -> 'Ў',
"ubreve " -> 'ŭ',
"Ubreve " -> 'Ŭ',
"ucirc " -> 'û',
"Ucirc " -> 'Û',
"ucy " -> 'у',
"Ucy " -> 'У',
"udarr" -> '⇅',
"udblac " -> 'ű',
"Udblac " -> 'Ű',
"udhar" -> '⥮',
"ufisht " -> '⥾',
"ugrave " -> 'ù',
"Ugrave " -> 'Ù',
"uHar " -> '⥣',
"uharl" -> '↿',
"uharr" -> '↾',
"uhblk " -> '▀',
"ulcorn" -> '⌜',
"ulcorner " -> '⌜',
"ulcrop " -> '⌏',
"ultri " -> '◸',
"umacr " -> 'ū',
"Umacr " -> 'Ū',
"uml " -> '¨',
"UnderBar " -> '̲',
"UnderBrace " -> '⏟',
"UnderBracket " -> '⎵',
"UnderParenthesis " -> '⏝',
"Union" -> '⋃',
"UnionPlus " -> '⊎',
"uogon " -> 'ų',
"Uogon " -> 'Ų',
"uparrow" -> '↑',
"Uparrow" -> '⇑',
"UpArrow" -> '↑',
"UpArrowBar " -> '⤒',
"UpArrowDownArrow " -> '⇅',
"updownarrow" -> '↕',
"Updownarrow" -> '⇕',
"UpDownArrow " -> '↕',
"UpEquilibrium " -> '⥮',
"upharpoonleft" -> '↿',
"upharpoonright" -> '↾',
"uplus" -> '⊎',
"UpperLeftArrow" -> '↖',
"UpperRightArrow" -> '↗',
"upsih" -> 'ϒ',
"upsilon" -> 'υ',
"Upsilon " -> 'Υ',
"upsi " -> 'υ',
"Upsi " -> 'ϒ',
"UpTee " -> '⊥',
"UpTeeArrow" -> '↥',
"upuparrows " -> '⇈',
"urcorn" -> '⌝',
"urcorner " -> '⌝',
"urcrop " -> '⌎',
"uring " -> 'ů',
"Uring " -> 'Ů',
"urtri " -> '◹',
"utdot " -> '⋰',
"utilde " -> 'ũ',
"Utilde " -> 'Ũ',
"utri" -> '▵',
"utrif" -> '▴',
"uuarr" -> '⇈',
"uuml " -> 'ü',
"Uuml " -> 'Ü',
"uwangle " -> '⦧',
"vangrt " -> '⦜',
"varepsilon " -> 'ε',
"varkappa " -> 'ϰ',
"varnothing " -> '∅',
"varpi " -> 'ϖ',
"varpropto " -> '∝',
"varr" -> '↕',
"vArr" -> '⇕',
"varrho " -> 'ϱ',
"varsigma " -> 'ς',
"vartheta " -> 'ϑ',
"vartriangleleft" -> '⊲',
"vartriangleright" -> '⊳',
"vcy " -> 'в',
"Vcy " -> 'В',
"vdash" -> '⊢',
"vDash" -> '⊨',
"Vdash " -> '⊩',
"VDash " -> '⊫',
"vee " -> '∨',
"Vee" -> '⋁',
"veebar " -> '⊻',
"veeeq " -> '≚',
"vellip " -> '⋮',
"verbar" -> '|',
"Verbar" -> '‖',
"vert" -> '|',
"Vert " -> '‖',
"VerticalBar" -> '∣',
"VerticalLine " -> '|',
"VerticalSeparator " -> '❘',
"VerticalTilde" -> '≀',
"VeryThinSpace " -> ' ',
"vltri" -> '⊲',
"vprop" -> '∝',
"vrtri" -> '⊳',
"Vvdash " -> '⊪',
"vzigzag " -> '⦚',
"wcirc " -> 'ŵ',
"Wcirc " -> 'Ŵ',
"wedbar " -> '⩟',
"wedge " -> '∧',
"Wedge" -> '⋀',
"wedgeq " -> '≙',
"weierp" -> '℘',
"wp " -> '℘',
"wr " -> '≀',
"wreath" -> '≀',
"xcap" -> '⋂',
"xcirc" -> '◯',
"xcup" -> '⋃',
"xdtri" -> '▽',
"xharr" -> '⟷',
"xhArr" -> '⟺',
"xi " -> 'ξ',
"Xi " -> 'Ξ',
"xlarr" -> '⟵',
"xlArr" -> '⟸',
"xmap" -> '⟼',
"xnis " -> '⋻',
"xodot" -> '⨀',
"xoplus" -> '⨁',
"xotime" -> '⨂',
"xrarr" -> '⟶',
"xrArr" -> '⟹',
"xsqcup" -> '⨆',
"xuplus" -> '⨄',
"xutri" -> '△',
"xvee" -> '⋁',
"xwedge" -> '⋀',
"yacute " -> 'ý',
"Yacute " -> 'Ý',
"yacy " -> 'я',
"YAcy " -> 'Я',
"ycirc " -> 'ŷ',
"Ycirc " -> 'Ŷ',
"ycy " -> 'ы',
"Ycy " -> 'Ы',
"yen " -> '¥',
"yicy " -> 'ї',
"YIcy " -> 'Ї',
"yucy " -> 'ю',
"YUcy " -> 'Ю',
"yuml " -> 'ÿ',
"Yuml " -> 'Ÿ',
"zacute " -> 'ź',
"Zacute " -> 'Ź',
"zcaron " -> 'ž',
"Zcaron " -> 'Ž',
"zcy " -> 'з',
"Zcy " -> 'З',
"zdot " -> 'ż',
"Zdot " -> 'Ż',
"zeetrf " -> 'ℨ',
"zeta " -> 'ζ',
"Zeta " -> 'Ζ',
"Zfr" -> 'ℨ',
"zhcy " -> 'ж',
"ZHcy " -> 'Ж',
"zigrarr " -> '⇝',
"Zopf " -> 'ℤ'
)
}
| lexml/lexml-parser-projeto-lei | src/main/scala/br/gov/lexml/parser/pl/util/Entities.scala | Scala | gpl-2.0 | 41,747 |
package com.pauldoo.euler.puzzle
import com.pauldoo.euler.common.Summations.sumOfSquares
import com.pauldoo.euler.common.Summations.sum
object Puzzle6 extends Puzzle {
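// Project Euler problem 6: the difference between the square of the sum and the
// sum of the squares of the first 100 natural numbers.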
def answer() = {
def square(n: BigInt) = n * n;
square(sum(1 to 100)) - sumOfSquares(100);
}
} | pauldoo/projecteuler | src/com/pauldoo/euler/puzzle/Puzzle6.scala | Scala | isc | 275 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.calls
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.dataformat.DataFormatConverters
import org.apache.flink.table.planner.codegen.CodeGenUtils._
import org.apache.flink.table.planner.codegen.GenerateUtils.{generateCallIfArgsNotNull, generateCallIfArgsNullable, generateStringResultCallIfArgsNotNull}
import org.apache.flink.table.planner.codegen.calls.ScalarOperatorGens._
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, GeneratedExpression}
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable._
import org.apache.flink.table.runtime.functions.SqlFunctionUtils
import org.apache.flink.table.runtime.typeutils.TypeCheckUtils.{isCharacterString, isTimestamp, isTimestampWithLocalZone}
import org.apache.flink.table.types.logical.{BooleanType, IntType, LogicalType, MapType, VarBinaryType, VarCharType}
import org.apache.calcite.runtime.SqlFunctions
import org.apache.calcite.sql.SqlOperator
import org.apache.calcite.sql.fun.SqlTrimFunction.Flag.{BOTH, LEADING, TRAILING}
import java.lang.reflect.Method
/**
* Code generator for calls with string parameters or return values.
* 1. Applies some optimizations specific to BinaryString.
* 2. Handles conversions between Java String and the internal String representation.
*
* <p>TODO Rewrite most of the methods here to work directly on the BinaryString
* instead of converting the BinaryString to String.
*/
object StringCallGen {
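// Dispatches string-related SQL operators to specialized code generators;
// returns None for operators that are not handled here.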
def generateCallExpression(
ctx: CodeGeneratorContext,
operator: SqlOperator,
operands: Seq[GeneratedExpression],
returnType: LogicalType): Option[GeneratedExpression] = {
def methodGen(method: Method): GeneratedExpression = {
new MethodCallGen(method).generate(ctx, operands, returnType)
}
val generator = operator match {
case LIKE =>
new LikeCallGen().generate(ctx, operands, new BooleanType())
case NOT_LIKE =>
generateNot(ctx, new LikeCallGen().generate(ctx, operands, new BooleanType()))
case SUBSTR | SUBSTRING => generateSubString(ctx, operands)
case LEFT => generateLeft(ctx, operands.head, operands(1))
case RIGHT => generateRight(ctx, operands.head, operands(1))
case CHAR_LENGTH | CHARACTER_LENGTH => generateCharLength(ctx, operands)
case SIMILAR_TO => generateSimilarTo(ctx, operands)
case NOT_SIMILAR_TO => generateNot(ctx, generateSimilarTo(ctx, operands))
case REGEXP_EXTRACT => generateRegexpExtract(ctx, operands)
case REGEXP_REPLACE => generateRegexpReplace(ctx, operands)
case IS_DECIMAL => generateIsDecimal(ctx, operands)
case IS_DIGIT => generateIsDigit(ctx, operands)
case IS_ALPHA => generateIsAlpha(ctx, operands)
case UPPER => generateUpper(ctx, operands)
case LOWER => generateLower(ctx, operands)
case INITCAP => generateInitcap(ctx, operands)
case POSITION => generatePosition(ctx, operands)
case LOCATE => generateLocate(ctx, operands)
case OVERLAY => generateOverlay(ctx, operands)
case LPAD => generateLpad(ctx, operands)
case RPAD => generateRpad(ctx, operands)
case REPEAT => generateRepeat(ctx, operands)
case REVERSE => generateReverse(ctx, operands)
case REPLACE => generateReplace(ctx, operands)
case SPLIT_INDEX => generateSplitIndex(ctx, operands)
case HASH_CODE if isCharacterString(operands.head.resultType) =>
generateHashCode(ctx, operands)
case MD5 => generateMd5(ctx, operands)
case SHA1 => generateSha1(ctx, operands)
case SHA224 => generateSha224(ctx, operands)
case SHA256 => generateSha256(ctx, operands)
case SHA384 => generateSha384(ctx, operands)
case SHA512 => generateSha512(ctx, operands)
case SHA2 => generateSha2(ctx, operands)
case PARSE_URL => generateParserUrl(ctx, operands)
case FROM_BASE64 => generateFromBase64(ctx, operands)
case TO_BASE64 => generateToBase64(ctx, operands)
case CHR => generateChr(ctx, operands)
case REGEXP => generateRegExp(ctx, operands)
case BIN => generateBin(ctx, operands)
case CONCAT_FUNCTION =>
operands.foreach(requireCharacterString)
generateConcat(ctx, operands)
case CONCAT_WS =>
operands.foreach(requireCharacterString)
generateConcatWs(ctx, operands)
case STR_TO_MAP => generateStrToMap(ctx, operands)
case TRIM => generateTrim(ctx, operands)
case LTRIM => generateTrimLeft(ctx, operands)
case RTRIM => generateTrimRight(ctx, operands)
case CONCAT =>
val left = operands.head
val right = operands(1)
requireCharacterString(left)
generateArithmeticConcat(ctx, left, right)
case UUID => generateUuid(ctx, operands)
case ASCII => generateAscii(ctx, operands.head)
case ENCODE => generateEncode(ctx, operands.head, operands(1))
case DECODE => generateDecode(ctx, operands.head, operands(1))
case INSTR => generateInstr(ctx, operands)
case PRINT => new PrintCallGen().generate(ctx, operands, returnType)
case IF =>
requireBoolean(operands.head)
new IfCallGen().generate(ctx, operands, returnType)
// Date/Time & BinaryString Converting -- start
case TO_DATE if operands.size == 1 && isCharacterString(operands.head.resultType) =>
methodGen(BuiltInMethods.STRING_TO_DATE)
case TO_DATE if operands.size == 2 &&
isCharacterString(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.STRING_TO_DATE_WITH_FORMAT)
case TO_TIMESTAMP if operands.size == 1 && isCharacterString(operands.head.resultType) =>
methodGen(BuiltInMethods.STRING_TO_TIMESTAMP)
case TO_TIMESTAMP if operands.size == 2 &&
isCharacterString(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.STRING_TO_TIMESTAMP_WITH_FORMAT)
case UNIX_TIMESTAMP if operands.size == 1 && isCharacterString(operands.head.resultType) =>
methodGen(BuiltInMethods.UNIX_TIMESTAMP_STR)
case UNIX_TIMESTAMP if operands.size == 2 &&
isCharacterString(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.UNIX_TIMESTAMP_FORMAT)
case DATE_FORMAT if operands.size == 2 &&
isTimestamp(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.DATE_FORMAT_LONG_STRING)
case DATE_FORMAT if operands.size == 2 &&
isTimestampWithLocalZone(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.DATE_FORMAT_LONG_STRING_TIME_ZONE)
case DATE_FORMAT if operands.size == 2 &&
isCharacterString(operands.head.resultType) &&
isCharacterString(operands(1).resultType) =>
methodGen(BuiltInMethods.DATE_FORMAT_STIRNG_STRING)
case CONVERT_TZ if operands.size == 3 &&
isCharacterString(operands.head.resultType) &&
isCharacterString(operands(1).resultType) &&
isCharacterString(operands(2).resultType) =>
methodGen(BuiltInMethods.CONVERT_TZ)
case _ => null
}
Option(generator)
}
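// Renders character-string operand terms as java.lang.String (via .toString(), or the
// null-safe safeToString variant below) so the generated code can call plain Java helpers.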
private def toStringTerms(terms: Seq[String], operands: Seq[GeneratedExpression]) = {
terms.zipWithIndex.map { case (term, index) =>
if (isCharacterString(operands(index).resultType)) {
s"$term.toString()"
} else {
term
}
}.mkString(",")
}
private def safeToStringTerms(terms: Seq[String], operands: Seq[GeneratedExpression]) = {
terms.zipWithIndex.map { case (term, index) =>
if (isCharacterString(operands(index).resultType)) {
s"$STRING_UTIL.safeToString($term)"
} else {
term
}
}.mkString(",")
}
def generateConcat(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNullable(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.concat(${terms.mkString(", ")})"
}
}
def generateConcatWs(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNullable(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.concatWs(${terms.mkString(", ")})"
}
}
/**
* Optimization: use BinaryString equals instead of compare.
*/
def generateStringEquals(
ctx: CodeGeneratorContext,
left: GeneratedExpression,
right: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new BooleanType(), Seq(left, right)) {
terms => s"(${terms.head}.equals(${terms(1)}))"
}
}
/**
* Optimization: use BinaryString equals instead of compare.
*/
def generateStringNotEquals(
ctx: CodeGeneratorContext,
left: GeneratedExpression,
right: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new BooleanType(), Seq(left, right)) {
terms => s"!(${terms.head}.equals(${terms(1)}))"
}
}
def generateSubString(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.substringSQL(${terms.head}, ${terms.drop(1).mkString(", ")})"
}
}
def generateLeft(
ctx: CodeGeneratorContext,
str: GeneratedExpression,
len: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), Seq(str, len)) {
val emptyString = s"$BINARY_STRING.EMPTY_UTF8"
terms =>
s"${terms(1)} <= 0 ? $emptyString :" +
s" $STRING_UTIL.substringSQL(${terms.head}, 1, ${terms(1)})"
}
}
def generateRight(
ctx: CodeGeneratorContext,
str: GeneratedExpression,
len: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), Seq(str, len)) {
terms =>
s"""
|${terms(1)} <= 0 ?
| $BINARY_STRING.EMPTY_UTF8 :
| ${terms(1)} >= ${terms.head}.numChars() ?
| ${terms.head} :
| $STRING_UTIL.substringSQL(${terms.head}, -${terms(1)})
""".stripMargin
}
}
def generateCharLength(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new IntType(), operands) {
terms => s"${terms.head}.numChars()"
}
}
def generateSimilarTo(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctions].getCanonicalName
generateCallIfArgsNotNull(ctx, new BooleanType(), operands) {
terms => s"$className.similar(${toStringTerms(terms, operands)})"
}
}
def generateRegexpExtract(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.regexpExtract(${safeToStringTerms(terms, operands)})"
}
}
def generateRegexpReplace(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.regexpReplace(${toStringTerms(terms, operands)})"
}
}
def generateIsDecimal(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNullable(ctx, new BooleanType(), operands) {
terms => s"$className.isDecimal(${safeToStringTerms(terms, operands)})"
}
}
def generateIsDigit(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNullable(ctx, new BooleanType(), operands) {
terms => s"$className.isDigit(${safeToStringTerms(terms, operands)})"
}
}
def generateAscii(
ctx: CodeGeneratorContext,
str: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new IntType(), Seq(str)) {
terms => s"${terms.head}.getSizeInBytes() <= 0 ? 0 : (int) ${terms.head}.byteAt(0)"
}
}
def generateIsAlpha(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNullable(ctx, new BooleanType(), operands) {
terms => s"$className.isAlpha(${safeToStringTerms(terms, operands)})"
}
}
def generateUpper(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"${terms.head}.toUpperCase()"
}
}
def generateLower(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"${terms.head}.toLowerCase()"
}
}
def generateInitcap(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctions].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.initcap(${terms.head}.toString())"
}
}
def generatePosition(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new IntType(), operands) {
terms => s"$className.position(${terms.mkString(",")})"
}
}
def generateLocate(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new IntType(), operands) {
terms => s"$className.position(${terms.mkString(",")})"
}
}
def generateInstr(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new IntType(), operands) {
terms =>
val startPosition = if (operands.length < 3) 1 else terms(2)
val nthAppearance = if (operands.length < 4) 1 else terms(3)
s"$className.instr(${terms.head}, ${terms(1)}, " +
s"$startPosition, $nthAppearance)"
}
}
def generateOverlay(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.overlay(${toStringTerms(terms, operands)})"
}
}
def generateArithmeticConcat(
ctx: CodeGeneratorContext,
left: GeneratedExpression,
right: GeneratedExpression): GeneratedExpression = {
generateStringResultCallIfArgsNotNull(ctx, Seq(left, right)) {
terms => s"${terms.head}.toString() + String.valueOf(${terms(1)})"
}
}
def generateLpad(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms =>
s"$className.lpad(${toStringTerms(terms, operands)})"
}
}
def generateRpad(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms =>
s"$className.rpad(${toStringTerms(terms, operands)})"
}
}
def generateRepeat(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.repeat(${toStringTerms(terms, operands)})"
}
}
def generateReverse(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.reverse(${terms.head})"
}
}
def generateReplace(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.replace(${toStringTerms(terms, operands)})"
}
}
def generateSplitIndex(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.splitIndex(${toStringTerms(terms, operands)})"
}
}
def generateKeyValue(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNullable(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$className.keyValue(${terms.mkString(",")})"
}
}
def generateHashCode(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new IntType(), operands) {
terms => s"$className.hashCode(${terms.head}.toString())"
}
}
def generateMd5(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "MD5", operands)
def generateHashInternal(
ctx: CodeGeneratorContext,
algorithm: String,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val digestTerm = ctx.addReusableMessageDigest(algorithm)
if (operands.length == 1) {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms =>s"$STRING_UTIL.hash(${terms.head}, $digestTerm)"
}
} else {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.hash($digestTerm, ${toStringTerms(terms, operands)})"
}
}
}
def generateSha1(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "SHA", operands)
def generateSha224(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "SHA-224", operands)
def generateSha256(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "SHA-256", operands)
def generateSha384(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "SHA-384", operands)
def generateSha512(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression =
generateHashInternal(ctx, "SHA-512", operands)
def generateSha2(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
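// When the bit length is a literal, a reusable MessageDigest is created once per operator;
// otherwise the "SHA-" algorithm name is assembled at runtime for every call.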
if (operands.last.literal) {
val digestTerm = ctx.addReusableSha2MessageDigest(operands.last)
if (operands.length == 2) {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms =>s"$STRING_UTIL.hash(${terms.head}, $digestTerm)"
}
} else {
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms =>
s"$className.hash($digestTerm," +
s"${toStringTerms(terms.dropRight(1), operands.dropRight(1))})"
}
}
} else {
if (operands.length == 2) {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms =>
s"""$STRING_UTIL.hash(${terms.head}, "SHA-" + ${terms.last})"""
}
} else {
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => {
val strTerms = toStringTerms(terms.dropRight(1), operands.dropRight(1))
s"""$className.hash("SHA-" + ${terms.last}, $strTerms)"""
}
}
}
}
}
def generateParserUrl(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.parseUrl(${safeToStringTerms(terms, operands)})"
}
}
def generateFromBase64(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new VarBinaryType(VarBinaryType.MAX_LENGTH), operands) {
terms => s"$className.fromBase64(${terms.head})"
}
}
def generateToBase64(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.toBase64(${terms.head})"
}
}
def generateChr(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.chr(${terms.head})"
}
}
def generateRegExp(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateCallIfArgsNotNull(ctx, new BooleanType(), operands) {
terms => s"$className.regExp(${toStringTerms(terms, operands)})"
}
}
def generateJsonValue(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms =>
s"$className.jsonValue(${safeToStringTerms(terms, operands)})"
}
}
def generateBin(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"Long.toBinaryString(${terms.head})"
}
}
def generateTrim(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
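// terms(0) carries the trim flag (BOTH / LEADING / TRAILING), terms(1) the characters
// to strip, and terms(2) the string being trimmed.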
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms =>
val leading = compareEnum(terms.head, BOTH) || compareEnum(terms.head, LEADING)
val trailing = compareEnum(terms.head, BOTH) || compareEnum(terms.head, TRAILING)
val args = s"$leading, $trailing, ${terms(1)}"
s"$STRING_UTIL.trim(${terms(2)}, $args)"
}
}
def generateTrimLeft(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.trimLeft(${terms.mkString(", ")})"
}
}
def generateTrimRight(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
generateCallIfArgsNotNull(ctx, new VarCharType(VarCharType.MAX_LENGTH), operands) {
terms => s"$STRING_UTIL.trimRight(${terms.mkString(", ")})"
}
}
def generateUuid(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
val className = classOf[SqlFunctionUtils].getCanonicalName
generateStringResultCallIfArgsNotNull(ctx, operands) {
terms => s"$className.uuid(${terms.mkString(",")})"
}
}
def generateStrToMap(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression]): GeneratedExpression = {
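// Builds the map with SqlFunctionUtils.strToMap, then converts the external Java map
// into the internal binary map format through a reusable DataFormatConverter.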
val className = classOf[SqlFunctionUtils].getCanonicalName
val t = new MapType(
new VarCharType(VarCharType.MAX_LENGTH), new VarCharType(VarCharType.MAX_LENGTH))
val converter = DataFormatConverters.getConverterForDataType(
DataTypes.MAP(DataTypes.STRING(), DataTypes.STRING()))
val converterTerm = ctx.addReusableObject(converter, "mapConverter")
generateCallIfArgsNotNull(ctx, t, operands) {
terms =>
val map = s"$className.strToMap(${toStringTerms(terms, operands)})"
s"($BINARY_MAP) $converterTerm.toInternal($map)"
}
}
def generateEncode(
ctx: CodeGeneratorContext,
str: GeneratedExpression,
charset: GeneratedExpression): GeneratedExpression = {
generateCallIfArgsNotNull(
ctx, new VarBinaryType(VarBinaryType.MAX_LENGTH), Seq(str, charset)) {
terms => s"${terms.head}.toString().getBytes(${terms(1)}.toString())"
}
}
def generateDecode(
ctx: CodeGeneratorContext,
binary: GeneratedExpression,
charset: GeneratedExpression): GeneratedExpression = {
generateStringResultCallIfArgsNotNull(ctx, Seq(binary, charset)) {
terms =>
s"new String(${terms.head}, ${terms(1)}.toString())"
}
}
}
| fhueske/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/StringCallGen.scala | Scala | apache-2.0 | 27,018 |
package lila.tournament
import org.joda.time.DateTime
import reactivemongo.api.ReadPreference
import scala.concurrent.duration._
import lila.db.dsl._
import lila.user.User
import lila.memo.CacheApi._
final class TournamentShieldApi(
tournamentRepo: TournamentRepo,
cacheApi: lila.memo.CacheApi
)(implicit ec: scala.concurrent.ExecutionContext) {
import TournamentShield._
import BSONHandlers._
def active(u: User): Fu[List[Award]] =
cache.getUnit dmap {
_.value.values.flatMap(_.headOption.filter(_.owner.value == u.id)).toList
}
def history(maxPerCateg: Option[Int]): Fu[History] =
cache.getUnit dmap { h =>
maxPerCateg.fold(h)(h.take)
}
def byCategKey(k: String): Fu[Option[(Category, List[Award])]] =
Category.byKey(k) ?? { categ =>
cache.getUnit dmap {
_.value get categ map {
categ -> _
}
}
}
def currentOwner(tour: Tournament): Fu[Option[OwnerId]] =
tour.isShield ?? {
Category.of(tour) ?? { cat =>
history(none).map(_.current(cat).map(_.owner))
}
}
private[tournament] def clear(): Unit = cache.invalidateUnit().unit
private[tournament] def clearAfterMarking(userId: User.ID): Funit = cache.getUnit map { hist =>
import cats.implicits._
if (hist.value.exists(_._2.exists(_.owner.value === userId))) clear()
}
private val cache = cacheApi.unit[History] {
_.refreshAfterWrite(1 day)
.buildAsyncFuture { _ =>
tournamentRepo.coll
.find(
$doc(
"schedule.freq" -> scheduleFreqHandler.writeTry(Schedule.Freq.Shield).get,
"status" -> statusBSONHandler.writeTry(Status.Finished).get
)
)
.sort($sort asc "startsAt")
.cursor[Tournament](ReadPreference.secondaryPreferred)
.list() map { tours =>
for {
tour <- tours
categ <- Category of tour
winner <- tour.winnerId
} yield Award(
categ = categ,
owner = OwnerId(winner),
date = tour.finishesAt,
tourId = tour.id
)
} map {
_.foldLeft(Map.empty[Category, List[Award]]) { case (hist, entry) =>
hist + (entry.categ -> hist.get(entry.categ).fold(List(entry))(entry :: _))
}
} dmap History.apply
}
}
}
object TournamentShield {
case class OwnerId(value: String) extends AnyVal
case class Award(
categ: Category,
owner: OwnerId,
date: DateTime,
tourId: Tournament.ID
)
// newer entry first
case class History(value: Map[Category, List[Award]]) {
def sorted: List[(Category, List[Award])] =
Category.all map { categ =>
categ -> ~(value get categ)
}
def userIds: List[User.ID] = value.values.flatMap(_.map(_.owner.value)).toList
def current(cat: Category): Option[Award] = value get cat flatMap (_.headOption)
def take(max: Int) =
copy(
value = value.view.mapValues(_ take max).toMap
)
}
private type SpeedOrVariant = Either[Schedule.Speed, chess.variant.Variant]
sealed abstract class Category(
val of: SpeedOrVariant,
val iconChar: Char
) {
def key = of.fold(_.key, _.key)
def name = of.fold(_.name, _.name)
def matches(tour: Tournament) =
if (tour.variant.standard) ~(for {
tourSpeed <- tour.schedule.map(_.speed)
categSpeed <- of.left.toOption
} yield tourSpeed == categSpeed)
else of.toOption.has(tour.variant)
}
object Category {
case object UltraBullet
extends Category(
of = Left(Schedule.Speed.UltraBullet),
iconChar = ''
)
case object HyperBullet
extends Category(
of = Left(Schedule.Speed.HyperBullet),
iconChar = ''
)
case object Bullet
extends Category(
of = Left(Schedule.Speed.Bullet),
iconChar = ''
)
case object SuperBlitz
extends Category(
of = Left(Schedule.Speed.SuperBlitz),
iconChar = ''
)
case object Blitz
extends Category(
of = Left(Schedule.Speed.Blitz),
iconChar = ''
)
case object Rapid
extends Category(
of = Left(Schedule.Speed.Rapid),
iconChar = ''
)
case object Classical
extends Category(
of = Left(Schedule.Speed.Classical),
iconChar = ''
)
case object Chess960
extends Category(
of = Right(chess.variant.Chess960),
iconChar = ''
)
case object KingOfTheHill
extends Category(
of = Right(chess.variant.KingOfTheHill),
iconChar = ''
)
case object Antichess
extends Category(
of = Right(chess.variant.Antichess),
iconChar = ''
)
case object Atomic
extends Category(
of = Right(chess.variant.Atomic),
iconChar = ''
)
case object ThreeCheck
extends Category(
of = Right(chess.variant.ThreeCheck),
iconChar = ''
)
case object Horde
extends Category(
of = Right(chess.variant.Horde),
iconChar = ''
)
case object RacingKings
extends Category(
of = Right(chess.variant.RacingKings),
iconChar = ''
)
case object Crazyhouse
extends Category(
of = Right(chess.variant.Crazyhouse),
iconChar = ''
)
val all: List[Category] = List(
Bullet,
SuperBlitz,
Blitz,
Rapid,
Classical,
HyperBullet,
UltraBullet,
Crazyhouse,
Chess960,
KingOfTheHill,
ThreeCheck,
Antichess,
Atomic,
Horde,
RacingKings
)
def of(t: Tournament): Option[Category] = all.find(_ matches t)
def byKey(k: String): Option[Category] = all.find(_.key == k)
}
def spotlight(name: String) =
Spotlight(
iconFont = "".some,
headline = s"Battle for the $name Shield",
description =
s"""This [Shield trophy](https://lichess.org/blog/Wh36WiQAAMMApuRb/introducing-shield-tournaments) is unique.
The winner keeps it for one month,
then must defend it during the next $name Shield tournament!""",
homepageHours = 6.some
)
}
| luanlv/lila | modules/tournament/src/main/TournamentShield.scala | Scala | mit | 6,468 |
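// --- Editor's illustration (not part of the lila source above) ---
// A minimal sketch of how TournamentShield.History behaves: award lists are kept
// newest-first per category, so `current` is the head of a category's list and
// `take` caps each list. The two Award values are passed in by the caller here
// only to avoid inventing DateTime and tournament ids; both are assumed to share
// the same category.
import lila.tournament.TournamentShield._

object TournamentShieldHistoryExample {
  def demo(newer: Award, older: Award): Unit = {
    val hist = History(Map(newer.categ -> List(newer, older)))
    assert(hist.current(newer.categ).contains(newer))       // newest award holds the shield
    assert(hist.take(1).value(newer.categ) == List(newer))  // history truncated per category
  }
}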
package edu.gemini.pit.ui.util
import com.jgoodies.forms.factories.Borders.DLU4_BORDER
import swing.{Separator, GridBagPanel, Component, Label}
trait Rows { this: GridBagPanel =>
border = DLU4_BORDER
private var row = 0
def addRow(a: Component, b: Component, f:GridBagPanel.Fill.Value = GridBagPanel.Fill.Horizontal, wy:Int = 0 ) {
add(a, new Constraints { gridx = 0; gridy = row; ipadx = 10; ipady = 4; anchor = GridBagPanel.Anchor.NorthEast })
add(b, new Constraints { gridx = 1; gridy = row; fill = f; weightx = 1; weighty = wy })
row = row + 1
}
def addRow(a: Component, b: Component, c: Component) {
add(a, new Constraints { gridx = 0; gridy = row; ipadx = 10; ipady = 4; anchor = GridBagPanel.Anchor.West })
add(b, new Constraints { gridx = 1; gridy = row; fill = GridBagPanel.Fill.Horizontal; weightx = 2 })
add(c, new Constraints { gridx = 2; gridy = row; weightx = 1 })
row = row + 1
}
def addRow(a: Component) {
add(a, new Constraints { gridx = 0; gridwidth = 2; gridy = row; ipadx = 10; ipady = 4; anchor = GridBagPanel.Anchor.West })
row = row + 1
}
def addRow(a: Component, cols:Int) {
add(a, new Constraints { gridx = 0; gridwidth = cols; gridy = row; ipadx = 10; ipady = 4; anchor = GridBagPanel.Anchor.West })
row = row + 1
}
def addCentered(a: Component) {
add(a, new Constraints { gridx = 0; gridwidth = 3; gridy = row; ipadx = 10; ipady = 4 })
row = row + 1
}
def addSpacer() {
addRow(new Label(""), new Label("")) // :-/
}
def addSeparator() {
add(new Separator(), new Constraints { gridx = 0; gridwidth = 3; gridy = row; ipadx = 10; ipady = 4; fill = GridBagPanel.Fill.Horizontal ; weightx = 1.0})
row = row + 1
}
} | arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/util/Rows.scala | Scala | bsd-3-clause | 1,735 |
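// --- Editor's illustration (not part of the OCS source above) ---
// A minimal usage sketch of the Rows mixin: a GridBagPanel subclass mixes it in and
// assembles a form row by row. The labels and fields are invented for the example.
import scala.swing.{GridBagPanel, Label, TextField}
import edu.gemini.pit.ui.util.Rows

class ExampleForm extends GridBagPanel with Rows {
  addRow(new Label("Title"), new TextField(20))                               // label + single-line editor
  addRow(new Label("Abstract"), new TextField(20), GridBagPanel.Fill.Both, 1) // let this row grow vertically
  addSpacer()
  addSeparator()                                                              // rule across the whole form
}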
package blended.activemq.client.internal
import java.io.File
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import akka.actor.ActorSystem
import akka.testkit.TestProbe
import blended.activemq.client.{ConnectionVerifierFactory, RoundtripConnectionVerifier, VerificationFailedHandler}
import blended.akka.internal.BlendedAkkaActivator
import blended.container.context.api.ContainerContext
import blended.jms.utils._
import blended.streams.jms.{JmsProducerSettings, JmsStreamSupport}
import blended.streams.message.{FlowEnvelope, FlowMessage}
import blended.streams.processor.Collector
import blended.testsupport.pojosr.{PojoSrTestHelper, SimplePojoContainerSpec}
import blended.testsupport.scalatest.LoggingFreeSpecLike
import blended.testsupport.{BlendedTestSupport, RequiresForkedJVM}
import blended.util.logging.Logger
import domino.DominoActivator
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.activemq.broker.BrokerService
import org.apache.activemq.store.memory.MemoryPersistenceAdapter
import org.osgi.framework.BundleActivator
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
@RequiresForkedJVM
class SlowRoundtripSpec extends SimplePojoContainerSpec
with LoggingFreeSpecLike
with PojoSrTestHelper
with Matchers
with JmsStreamSupport
with BeforeAndAfterAll {
private val brokerName : String = "slow"
private val log : Logger = Logger[SlowRoundtripSpec]
private val verifyRequest : String = "verify"
private val verifyRespond : String = "verified"
private val vendor : String = "activemq"
private val provider : String = "conn1"
private val broker : BrokerService = {
val b = new BrokerService()
b.setBrokerName(brokerName)
b.setPersistent(false)
b.setUseJmx(false)
b.setPersistenceAdapter(new MemoryPersistenceAdapter)
b.setDedicatedTaskRunner(true)
b.start()
b.waitUntilStarted()
b
}
override protected def beforeAll(): Unit = {
super.beforeAll()
}
override protected def afterAll(): Unit = {
broker.stop()
broker.waitUntilStopped()
}
class SimpleResponder(system : ActorSystem) {
implicit val actorSys : ActorSystem = system
implicit val eCtxt : ExecutionContext = system.dispatcher
val simpleCf : IdAwareConnectionFactory = SimpleIdAwareConnectionFactory(
vendor = "activemq", provider = "spec",
clientId = "spec",
cf = new ActiveMQConnectionFactory(s"vm://$brokerName?create=false"),
minReconnect = 5.seconds
)
def respond() : Unit = {
log.info("Trying to receive verification request")
val verifyRec : Collector[FlowEnvelope] = receiveMessages(
headerCfg = headerCfg,
cf = simpleCf,
dest = JmsQueue(verifyRequest),
log = envLogger(log),
listener = 1,
minMessageDelay = None,
selector = None,
completeOn = Some(_.nonEmpty),
timeout = Some(3.seconds),
ackTimeout = 1.second
)
val verifyMsg : FlowEnvelope = Await.result(verifyRec.result, 4.seconds).headOption.get
log.info("sending verification response")
sendMessages(
producerSettings = JmsProducerSettings(
log = envLogger(log),
headerCfg = headerCfg,
connectionFactory = simpleCf,
jmsDestination = Some(JmsQueue("verified"))
),
log = envLogger(log),
timeout = 10.seconds,
verifyMsg
)
}
}
override def baseDir: String = new File(BlendedTestSupport.projectTestOutput, "slow").getAbsolutePath()
private var failed : List[String] = List.empty
private val verifyCounter : AtomicInteger = new AtomicInteger(0)
private class SlowRoundtripActivator extends DominoActivator {
private val firstTry : AtomicBoolean = new AtomicBoolean(true)
whenBundleActive {
whenServicePresent[ActorSystem] { system =>
implicit val actorSys : ActorSystem = system
val responder : SimpleResponder = new SimpleResponder(system)
val slowFactory : ConnectionVerifierFactory = () => new RoundtripConnectionVerifier(
probeMsg = id => FlowEnvelope(FlowMessage(FlowMessage.noProps), id),
verify = _ => true,
requestDest = JmsQueue(verifyRequest),
responseDest = JmsQueue(verifyRespond),
retryInterval = 5.seconds,
receiveTimeout = 5.seconds
) {
override protected def probe(ctCtxt: ContainerContext)(cf: IdAwareConnectionFactory): Unit = {
verifyCounter.incrementAndGet()
if (firstTry.get()) {
val probe: TestProbe = TestProbe()
system.eventStream.subscribe(probe.ref, classOf[ConnectionStateChanged])
system.eventStream.publish(QueryConnectionState(vendor, provider))
probe.fishForMessage(timeout, "Waiting for first connection") {
case evt: ConnectionStateChanged => evt.state.status == Connected
}
super.probe(ctCtxt)(cf)
system.eventStream.publish(MaxKeepAliveExceeded(vendor, provider))
probe.fishForMessage(timeout, "Waiting for disconnect") {
case evt: ConnectionStateChanged => evt.state.status == Disconnected
}
system.stop(probe.ref)
responder.respond()
firstTry.set(false)
} else {
super.probe(ctCtxt)(cf)
responder.respond()
}
}
}
val slowHandler : VerificationFailedHandler = (cf: IdAwareConnectionFactory) => {
failed = s"${cf.vendor}:${cf.provider}" :: failed
}
slowFactory.providesService[ConnectionVerifierFactory]("name" -> "slow")
slowHandler.providesService[VerificationFailedHandler]("name" -> "slow")
}
}
}
override def bundles: Seq[(String, BundleActivator)] = Seq(
"blended.akka" -> new BlendedAkkaActivator(),
"blended.activemq.client" -> new AmqClientActivator(),
"slow" -> new SlowRoundtripActivator
)
"The ActiveMQ Client Activator should" - {
"register a connection factory after the underlying connection factory has been restarted due to failed pings" in {
implicit val system : ActorSystem = mandatoryService[ActorSystem](registry)
val probe : TestProbe = TestProbe()
system.eventStream.subscribe(probe.ref, classOf[ConnectionStateChanged])
probe.fishForMessage(5.seconds, "Waiting for second connected event"){
case evt : ConnectionStateChanged =>
evt.state.status == Connected
}
// The service will be available after the verifier has finally verified the connection
// It should still succeed after the connection restart
mandatoryService[IdAwareConnectionFactory](registry, filter = Some("(&(vendor=activemq)(provider=conn1))"), timeout = 30.seconds)
failed should be (empty)
assert(verifyCounter.get() > 1)
}
}
}
| woq-blended/blended | blended.activemq.client/src/test/scala/blended/activemq/client/internal/SlowRoundtripSpec.scala | Scala | apache-2.0 | 7,101 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.data.webhooks
import org.json4s.JObject
/** Connector for Webhooks connection */
private[prediction] trait JsonConnector {
// TODO: support conversion to multiple events?
/** Convert from original JObject to Event JObject
   * @param data original JObject received through webhooks
* @return Event JObject
*/
def toEventJson(data: JObject): JObject
}
| wenaz/PredictionIO | data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala | Scala | apache-2.0 | 1,007 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.{Locale, UUID}
import scala.concurrent.Future
import org.apache.spark.{MapOutputStatistics, SparkFunSuite, TaskContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParserInterface}
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan, Statistics, UnresolvedHint}
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, QueryStageExec}
import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, BroadcastExchangeLike, ShuffleExchangeExec, ShuffleExchangeLike, ShuffleOrigin}
import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.COLUMN_BATCH_SIZE
import org.apache.spark.sql.internal.StaticSQLConf.SPARK_SESSION_EXTENSIONS
import org.apache.spark.sql.types.{DataType, Decimal, IntegerType, LongType, Metadata, StructType}
import org.apache.spark.sql.vectorized.{ColumnarArray, ColumnarBatch, ColumnarMap, ColumnVector}
import org.apache.spark.unsafe.types.UTF8String
/**
* Test cases for the [[SparkSessionExtensions]].
*/
class SparkSessionExtensionSuite extends SparkFunSuite {
private def create(
builder: SparkSessionExtensionsProvider): Seq[SparkSessionExtensionsProvider] = Seq(builder)
private def stop(spark: SparkSession): Unit = {
spark.stop()
SparkSession.clearActiveSession()
SparkSession.clearDefaultSession()
}
private def withSession(
builders: Seq[SparkSessionExtensionsProvider])(f: SparkSession => Unit): Unit = {
val builder = SparkSession.builder().master("local[1]")
builders.foreach(builder.withExtensions)
val spark = builder.getOrCreate()
try f(spark) finally {
stop(spark)
}
}
test("inject analyzer rule") {
withSession(Seq(_.injectResolutionRule(MyRule))) { session =>
assert(session.sessionState.analyzer.extendedResolutionRules.contains(MyRule(session)))
}
}
test("inject post hoc resolution analyzer rule") {
withSession(Seq(_.injectPostHocResolutionRule(MyRule))) { session =>
assert(session.sessionState.analyzer.postHocResolutionRules.contains(MyRule(session)))
}
}
test("inject check analysis rule") {
withSession(Seq(_.injectCheckRule(MyCheckRule))) { session =>
assert(session.sessionState.analyzer.extendedCheckRules.contains(MyCheckRule(session)))
}
}
test("inject optimizer rule") {
withSession(Seq(_.injectOptimizerRule(MyRule))) { session =>
assert(session.sessionState.optimizer.batches.flatMap(_.rules).contains(MyRule(session)))
}
}
test("SPARK-33621: inject a pre CBO rule") {
withSession(Seq(_.injectPreCBORule(MyRule))) { session =>
assert(session.sessionState.optimizer.preCBORules.contains(MyRule(session)))
}
}
test("inject spark planner strategy") {
withSession(Seq(_.injectPlannerStrategy(MySparkStrategy))) { session =>
assert(session.sessionState.planner.strategies.contains(MySparkStrategy(session)))
}
}
test("inject parser") {
val extension = create { extensions =>
extensions.injectParser((_: SparkSession, _: ParserInterface) => CatalystSqlParser)
}
withSession(extension) { session =>
assert(session.sessionState.sqlParser === CatalystSqlParser)
}
}
test("inject multiple rules") {
withSession(Seq(_.injectOptimizerRule(MyRule),
_.injectPlannerStrategy(MySparkStrategy))) { session =>
assert(session.sessionState.optimizer.batches.flatMap(_.rules).contains(MyRule(session)))
assert(session.sessionState.planner.strategies.contains(MySparkStrategy(session)))
}
}
test("inject stacked parsers") {
val extension = create { extensions =>
extensions.injectParser((_: SparkSession, _: ParserInterface) => CatalystSqlParser)
extensions.injectParser(MyParser)
extensions.injectParser(MyParser)
}
withSession(extension) { session =>
val parser = MyParser(session, MyParser(session, CatalystSqlParser))
assert(session.sessionState.sqlParser === parser)
}
}
test("inject function") {
val extensions = create { extensions =>
extensions.injectFunction(MyExtensions.myFunction)
}
withSession(extensions) { session =>
assert(session.sessionState.functionRegistry
.lookupFunction(MyExtensions.myFunction._1).isDefined)
}
}
case class MyHintRule(spark: SparkSession) extends Rule[LogicalPlan] {
val MY_HINT_NAME = Set("CONVERT_TO_EMPTY")
override def apply(plan: LogicalPlan): LogicalPlan =
plan.resolveOperators {
case h: UnresolvedHint if MY_HINT_NAME.contains(h.name.toUpperCase(Locale.ROOT)) =>
LocalRelation(h.output, data = Seq.empty, isStreaming = h.isStreaming)
}
}
test("inject custom hint rule") {
withSession(Seq(_.injectPostHocResolutionRule(MyHintRule))) { session =>
assert(
session.range(1).hint("CONVERT_TO_EMPTY").logicalPlan.isInstanceOf[LocalRelation],
"plan is expected to be a local relation"
)
}
}
test("inject adaptive query prep rule") {
val extensions = create { extensions =>
// inject rule that will run during AQE query stage preparation and will add custom tags
// to the plan
extensions.injectQueryStagePrepRule(session => MyQueryStagePrepRule())
// inject rule that will run during AQE query stage optimization and will verify that the
// custom tags were written in the preparation phase
extensions.injectColumnar(session =>
MyColumnarRule(MyNewQueryStageRule(), MyNewQueryStageRule()))
}
withSession(extensions) { session =>
session.sessionState.conf.setConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED, true)
assert(session.sessionState.queryStagePrepRules.contains(MyQueryStagePrepRule()))
assert(session.sessionState.columnarRules.contains(
MyColumnarRule(MyNewQueryStageRule(), MyNewQueryStageRule())))
import session.sqlContext.implicits._
val data = Seq((100L), (200L), (300L)).toDF("vals").repartition(1)
val df = data.selectExpr("vals + 1")
df.collect()
}
}
test("inject columnar AQE on") {
testInjectColumnar(true)
}
test("inject columnar AQE off") {
testInjectColumnar(false)
}
private def testInjectColumnar(enableAQE: Boolean): Unit = {
def collectPlanSteps(plan: SparkPlan): Seq[Int] = plan match {
case a: AdaptiveSparkPlanExec =>
assert(a.toString.startsWith("AdaptiveSparkPlan isFinalPlan=true"))
collectPlanSteps(a.executedPlan)
case _ => plan.collect {
case _: ReplacedRowToColumnarExec => 1
case _: ColumnarProjectExec => 10
case _: ColumnarToRowExec => 100
case s: QueryStageExec => collectPlanSteps(s.plan).sum
case _: MyShuffleExchangeExec => 1000
case _: MyBroadcastExchangeExec => 10000
}
}
val extensions = create { extensions =>
extensions.injectColumnar(session =>
MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule()))
}
withSession(extensions) { session =>
session.sessionState.conf.setConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED, enableAQE)
assert(session.sessionState.columnarRules.contains(
MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
import session.sqlContext.implicits._
// perform a join to inject a broadcast exchange
val left = Seq((1, 50L), (2, 100L), (3, 150L)).toDF("l1", "l2")
val right = Seq((1, 50L), (2, 100L), (3, 150L)).toDF("r1", "r2")
val data = left.join(right, $"l1" === $"r1")
// repartitioning avoids having the add operation pushed up into the LocalTableScan
.repartition(1)
val df = data.selectExpr("l2 + r2")
// execute the plan so that the final adaptive plan is available when AQE is on
df.collect()
val found = collectPlanSteps(df.queryExecution.executedPlan).sum
// 1 MyBroadcastExchangeExec
// 1 MyShuffleExchangeExec
// 1 ColumnarToRowExec
// 2 ColumnarProjectExec
// 1 ReplacedRowToColumnarExec
// so 11121 is expected.
assert(found == 11121)
// Verify that we get back the expected, wrong, result
val result = df.collect()
assert(result(0).getLong(0) == 101L) // Check that broken columnar Add was used.
assert(result(1).getLong(0) == 201L)
assert(result(2).getLong(0) == 301L)
}
}
test("reset column vectors") {
val session = SparkSession.builder()
.master("local[1]")
.config(COLUMN_BATCH_SIZE.key, 2)
.withExtensions { extensions =>
extensions.injectColumnar(session =>
MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())) }
.getOrCreate()
try {
assert(session.sessionState.columnarRules.contains(
MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
import session.sqlContext.implicits._
val input = Seq((100L), (200L), (300L))
val data = input.toDF("vals").repartition(1)
val df = data.selectExpr("vals + 1")
val result = df.collect()
assert(result sameElements input.map(x => Row(x + 2)))
} finally {
stop(session)
}
}
test("use custom class for extensions") {
val session = SparkSession.builder()
.master("local[1]")
.config(SPARK_SESSION_EXTENSIONS.key, classOf[MyExtensions].getCanonicalName)
.getOrCreate()
try {
assert(session.sessionState.planner.strategies.contains(MySparkStrategy(session)))
assert(session.sessionState.analyzer.extendedResolutionRules.contains(MyRule(session)))
assert(session.sessionState.analyzer.postHocResolutionRules.contains(MyRule(session)))
assert(session.sessionState.analyzer.extendedCheckRules.contains(MyCheckRule(session)))
assert(session.sessionState.optimizer.batches.flatMap(_.rules).contains(MyRule(session)))
assert(session.sessionState.sqlParser.isInstanceOf[MyParser])
assert(session.sessionState.functionRegistry
.lookupFunction(MyExtensions.myFunction._1).isDefined)
assert(session.sessionState.columnarRules.contains(
MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
} finally {
stop(session)
}
}
test("use multiple custom class for extensions in the specified order") {
val session = SparkSession.builder()
.master("local[1]")
.config(SPARK_SESSION_EXTENSIONS.key, Seq(
classOf[MyExtensions2].getCanonicalName,
classOf[MyExtensions].getCanonicalName).mkString(","))
.getOrCreate()
try {
assert(session.sessionState.planner.strategies.containsSlice(
Seq(MySparkStrategy2(session), MySparkStrategy(session))))
val orderedRules = Seq(MyRule2(session), MyRule(session))
val orderedCheckRules = Seq(MyCheckRule2(session), MyCheckRule(session))
val parser = MyParser(session, CatalystSqlParser)
assert(session.sessionState.analyzer.extendedResolutionRules.containsSlice(orderedRules))
assert(session.sessionState.analyzer.postHocResolutionRules.containsSlice(orderedRules))
assert(session.sessionState.analyzer.extendedCheckRules.containsSlice(orderedCheckRules))
assert(session.sessionState.optimizer.batches.flatMap(_.rules).filter(orderedRules.contains)
.containsSlice(orderedRules ++ orderedRules)) // The optimizer rules are duplicated
assert(session.sessionState.sqlParser === parser)
assert(session.sessionState.functionRegistry
.lookupFunction(MyExtensions.myFunction._1).isDefined)
assert(session.sessionState.functionRegistry
.lookupFunction(MyExtensions2.myFunction._1).isDefined)
} finally {
stop(session)
}
}
test("allow an extension to be duplicated") {
val session = SparkSession.builder()
.master("local[1]")
.config(SPARK_SESSION_EXTENSIONS.key, Seq(
classOf[MyExtensions].getCanonicalName,
classOf[MyExtensions].getCanonicalName).mkString(","))
.getOrCreate()
try {
assert(session.sessionState.planner.strategies.count(_ === MySparkStrategy(session)) === 2)
assert(session.sessionState.analyzer.extendedResolutionRules.count(_ === MyRule(session)) ===
2)
assert(session.sessionState.analyzer.postHocResolutionRules.count(_ === MyRule(session)) ===
2)
assert(session.sessionState.analyzer.extendedCheckRules.count(_ === MyCheckRule(session)) ===
2)
assert(session.sessionState.optimizer.batches.flatMap(_.rules)
.count(_ === MyRule(session)) === 4) // The optimizer rules are duplicated
val outerParser = session.sessionState.sqlParser
assert(outerParser.isInstanceOf[MyParser])
assert(outerParser.asInstanceOf[MyParser].delegate.isInstanceOf[MyParser])
assert(session.sessionState.functionRegistry
.lookupFunction(MyExtensions.myFunction._1).isDefined)
} finally {
stop(session)
}
}
test("use the last registered function name when there are duplicates") {
val session = SparkSession.builder()
.master("local[1]")
.config(SPARK_SESSION_EXTENSIONS.key, Seq(
classOf[MyExtensions2].getCanonicalName,
classOf[MyExtensions2Duplicate].getCanonicalName).mkString(","))
.getOrCreate()
try {
val lastRegistered = session.sessionState.functionRegistry
.lookupFunction(FunctionIdentifier("myFunction2"))
assert(lastRegistered.isDefined)
assert(lastRegistered.get !== MyExtensions2.myFunction._2)
assert(lastRegistered.get === MyExtensions2Duplicate.myFunction._2)
} finally {
stop(session)
}
}
test("SPARK-35380: Loading extensions from ServiceLoader") {
val builder = SparkSession.builder().master("local[1]")
Seq(None, Some(classOf[YourExtensions].getName)).foreach { ext =>
ext.foreach(builder.config(SPARK_SESSION_EXTENSIONS.key, _))
val session = builder.getOrCreate()
try {
assert(session.sql("select get_fake_app_name()").head().getString(0) === "Fake App Name")
} finally {
stop(session)
}
}
}
test("SPARK-35673: user-defined hint and unrecognized hint in subquery") {
withSession(Seq(_.injectPostHocResolutionRule(MyHintRule))) { session =>
// unrecognized hint
QueryTest.checkAnswer(
session.sql(
"""
|SELECT *
|FROM (
| SELECT /*+ some_random_hint_that_does_not_exist */ 42
|)
|""".stripMargin),
Row(42) :: Nil)
// user-defined hint
QueryTest.checkAnswer(
session.sql(
"""
|SELECT *
|FROM (
| SELECT /*+ CONVERT_TO_EMPTY */ 42
|)
|""".stripMargin),
Nil)
}
}
}
case class MyRule(spark: SparkSession) extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan
}
case class MyCheckRule(spark: SparkSession) extends (LogicalPlan => Unit) {
override def apply(plan: LogicalPlan): Unit = { }
}
case class MySparkStrategy(spark: SparkSession) extends SparkStrategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = Seq.empty
}
case class MyParser(spark: SparkSession, delegate: ParserInterface) extends ParserInterface {
override def parsePlan(sqlText: String): LogicalPlan =
delegate.parsePlan(sqlText)
override def parseExpression(sqlText: String): Expression =
delegate.parseExpression(sqlText)
override def parseTableIdentifier(sqlText: String): TableIdentifier =
delegate.parseTableIdentifier(sqlText)
override def parseFunctionIdentifier(sqlText: String): FunctionIdentifier =
delegate.parseFunctionIdentifier(sqlText)
override def parseMultipartIdentifier(sqlText: String): Seq[String] =
delegate.parseMultipartIdentifier(sqlText)
override def parseTableSchema(sqlText: String): StructType =
delegate.parseTableSchema(sqlText)
override def parseDataType(sqlText: String): DataType =
delegate.parseDataType(sqlText)
}
object MyExtensions {
val myFunction = (FunctionIdentifier("myFunction"),
new ExpressionInfo(
"noClass",
"myDb",
"myFunction",
"usage",
"extended usage",
" Examples:",
"""
note
""",
"",
"3.0.0",
"""
deprecated
""",
""),
(_: Seq[Expression]) => Literal(5, IntegerType))
}
case class CloseableColumnBatchIterator(itr: Iterator[ColumnarBatch],
f: ColumnarBatch => ColumnarBatch) extends Iterator[ColumnarBatch] {
var cb: ColumnarBatch = null
private def closeCurrentBatch(): Unit = {
if (cb != null) {
cb.close
cb = null
}
}
TaskContext.get().addTaskCompletionListener[Unit]((tc: TaskContext) => {
closeCurrentBatch()
})
override def hasNext: Boolean = {
closeCurrentBatch()
itr.hasNext
}
override def next(): ColumnarBatch = {
closeCurrentBatch()
cb = f(itr.next())
cb
}
}
object NoCloseColumnVector extends Logging {
def wrapIfNeeded(cv: ColumnVector): NoCloseColumnVector = cv match {
case ref: NoCloseColumnVector =>
ref
case vec => NoCloseColumnVector(vec)
}
}
/**
* Provide a ColumnVector so ColumnarExpression can close temporary values without
* having to guess what type it really is.
*/
case class NoCloseColumnVector(wrapped: ColumnVector) extends ColumnVector(wrapped.dataType) {
private var refCount = 1
/**
* Don't actually close the ColumnVector this wraps. The producer of the vector will take
* care of that.
*/
override def close(): Unit = {
// Empty
}
override def hasNull: Boolean = wrapped.hasNull
override def numNulls(): Int = wrapped.numNulls
override def isNullAt(rowId: Int): Boolean = wrapped.isNullAt(rowId)
override def getBoolean(rowId: Int): Boolean = wrapped.getBoolean(rowId)
override def getByte(rowId: Int): Byte = wrapped.getByte(rowId)
override def getShort(rowId: Int): Short = wrapped.getShort(rowId)
override def getInt(rowId: Int): Int = wrapped.getInt(rowId)
override def getLong(rowId: Int): Long = wrapped.getLong(rowId)
override def getFloat(rowId: Int): Float = wrapped.getFloat(rowId)
override def getDouble(rowId: Int): Double = wrapped.getDouble(rowId)
override def getArray(rowId: Int): ColumnarArray = wrapped.getArray(rowId)
override def getMap(ordinal: Int): ColumnarMap = wrapped.getMap(ordinal)
override def getDecimal(rowId: Int, precision: Int, scale: Int): Decimal =
wrapped.getDecimal(rowId, precision, scale)
override def getUTF8String(rowId: Int): UTF8String = wrapped.getUTF8String(rowId)
override def getBinary(rowId: Int): Array[Byte] = wrapped.getBinary(rowId)
override protected def getChild(ordinal: Int): ColumnVector = wrapped.getChild(ordinal)
}
trait ColumnarExpression extends Expression with Serializable {
/**
* Returns true if this expression supports columnar processing through [[columnarEval]].
*/
def supportsColumnar: Boolean = true
/**
* Returns the result of evaluating this expression on the entire
* [[org.apache.spark.sql.vectorized.ColumnarBatch]]. The result of
* calling this may be a single [[org.apache.spark.sql.vectorized.ColumnVector]] or a scalar
* value. Scalar values typically happen if they are a part of the expression i.e. col("a") + 100.
* In this case the 100 is a [[org.apache.spark.sql.catalyst.expressions.Literal]] that
* [[org.apache.spark.sql.catalyst.expressions.Add]] would have to be able to handle.
*
* By convention any [[org.apache.spark.sql.vectorized.ColumnVector]] returned by [[columnarEval]]
* is owned by the caller and will need to be closed by them. This can happen by putting it into
* a [[org.apache.spark.sql.vectorized.ColumnarBatch]] and closing the batch or by closing the
* vector directly if it is a temporary value.
*/
def columnarEval(batch: ColumnarBatch): Any = {
throw new IllegalStateException(s"Internal Error ${this.getClass} has column support mismatch")
}
// We need to override equals because we are subclassing a case class
override def equals(other: Any): Boolean = {
if (!super.equals(other)) {
return false
}
return other.isInstanceOf[ColumnarExpression]
}
override def hashCode(): Int = super.hashCode()
}
object ColumnarBindReferences extends Logging {
// Mostly copied from BoundAttribute.scala so we can do columnar processing
def bindReference[A <: ColumnarExpression](
expression: A,
input: AttributeSeq,
allowFailures: Boolean = false): A = {
expression.transform { case a: AttributeReference =>
val ordinal = input.indexOf(a.exprId)
if (ordinal == -1) {
if (allowFailures) {
a
} else {
sys.error(s"Couldn't find $a in ${input.attrs.mkString("[", ",", "]")}")
}
} else {
new ColumnarBoundReference(ordinal, a.dataType, input(ordinal).nullable)
}
}.asInstanceOf[A]
}
/**
* A helper function to bind given expressions to an input schema.
*/
def bindReferences[A <: ColumnarExpression](
expressions: Seq[A],
input: AttributeSeq): Seq[A] = {
expressions.map(ColumnarBindReferences.bindReference(_, input))
}
}
class ColumnarBoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
extends BoundReference(ordinal, dataType, nullable) with ColumnarExpression {
override def columnarEval(batch: ColumnarBatch): Any = {
// Because of the convention that the returned ColumnVector must be closed by the
// caller we wrap this column vector so a close is a NOOP, and let the original source
// of the vector close it.
NoCloseColumnVector.wrapIfNeeded(batch.column(ordinal))
}
}
class ColumnarAlias(child: ColumnarExpression, name: String)(
override val exprId: ExprId = NamedExpression.newExprId,
override val qualifier: Seq[String] = Seq.empty,
override val explicitMetadata: Option[Metadata] = None,
override val nonInheritableMetadataKeys: Seq[String] = Seq.empty)
extends Alias(child, name)(exprId, qualifier, explicitMetadata, nonInheritableMetadataKeys)
with ColumnarExpression {
override def columnarEval(batch: ColumnarBatch): Any = child.columnarEval(batch)
override protected def withNewChildInternal(newChild: Expression): ColumnarAlias =
new ColumnarAlias(newChild.asInstanceOf[ColumnarExpression], name)(exprId, qualifier,
explicitMetadata, nonInheritableMetadataKeys)
}
class ColumnarAttributeReference(
name: String,
dataType: DataType,
nullable: Boolean = true,
override val metadata: Metadata = Metadata.empty)(
override val exprId: ExprId = NamedExpression.newExprId,
override val qualifier: Seq[String] = Seq.empty[String])
extends AttributeReference(name, dataType, nullable, metadata)(exprId, qualifier)
with ColumnarExpression {
// No columnar eval is needed because this must be bound before it is evaluated
}
class ColumnarLiteral (value: Any, dataType: DataType) extends Literal(value, dataType)
with ColumnarExpression {
override def columnarEval(batch: ColumnarBatch): Any = value
}
/**
* A version of ProjectExec that adds in columnar support.
*/
class ColumnarProjectExec(projectList: Seq[NamedExpression], child: SparkPlan)
extends ProjectExec(projectList, child) {
override def supportsColumnar: Boolean =
projectList.forall(_.asInstanceOf[ColumnarExpression].supportsColumnar)
// Disable code generation
override def supportCodegen: Boolean = false
override def doExecuteColumnar() : RDD[ColumnarBatch] = {
val boundProjectList: Seq[Any] =
ColumnarBindReferences.bindReferences(
projectList.asInstanceOf[Seq[ColumnarExpression]], child.output)
val rdd = child.executeColumnar()
rdd.mapPartitions((itr) => CloseableColumnBatchIterator(itr,
(cb) => {
val newColumns = boundProjectList.map(
expr => expr.asInstanceOf[ColumnarExpression].columnarEval(cb).asInstanceOf[ColumnVector]
).toArray
new ColumnarBatch(newColumns, cb.numRows())
})
)
}
// We have to override equals because subclassing a case class like ProjectExec is not that clean
// One of the issues is that the generated equals will see ColumnarProjectExec and ProjectExec
// as being equal and this can result in the withNewChildren method not actually replacing
// anything
override def equals(other: Any): Boolean = {
if (!super.equals(other)) {
return false
}
return other.isInstanceOf[ColumnarProjectExec]
}
override def hashCode(): Int = super.hashCode()
override def withNewChildInternal(newChild: SparkPlan): ColumnarProjectExec =
new ColumnarProjectExec(projectList, newChild)
}
/**
* A version of add that supports columnar processing for longs. This version is broken
* on purpose so it adds the numbers plus 1 so that the tests can show that it was replaced.
*/
class BrokenColumnarAdd(
left: ColumnarExpression,
right: ColumnarExpression,
failOnError: Boolean = false)
extends Add(left, right, failOnError) with ColumnarExpression {
override def supportsColumnar(): Boolean = left.supportsColumnar && right.supportsColumnar
override def columnarEval(batch: ColumnarBatch): Any = {
var lhs: Any = null
var rhs: Any = null
var ret: Any = null
try {
lhs = left.columnarEval(batch)
rhs = right.columnarEval(batch)
if (lhs == null || rhs == null) {
ret = null
} else if (lhs.isInstanceOf[ColumnVector] && rhs.isInstanceOf[ColumnVector]) {
val l = lhs.asInstanceOf[ColumnVector]
val r = rhs.asInstanceOf[ColumnVector]
val result = new OnHeapColumnVector(batch.numRows(), dataType)
ret = result
for (i <- 0 until batch.numRows()) {
result.appendLong(l.getLong(i) + r.getLong(i) + 1) // BUG to show we replaced Add
}
} else if (rhs.isInstanceOf[ColumnVector]) {
val l = lhs.asInstanceOf[Long]
val r = rhs.asInstanceOf[ColumnVector]
val result = new OnHeapColumnVector(batch.numRows(), dataType)
ret = result
for (i <- 0 until batch.numRows()) {
result.appendLong(l + r.getLong(i) + 1) // BUG to show we replaced Add
}
} else if (lhs.isInstanceOf[ColumnVector]) {
val l = lhs.asInstanceOf[ColumnVector]
val r = rhs.asInstanceOf[Long]
val result = new OnHeapColumnVector(batch.numRows(), dataType)
ret = result
for (i <- 0 until batch.numRows()) {
result.appendLong(l.getLong(i) + r + 1) // BUG to show we replaced Add
}
} else {
ret = nullSafeEval(lhs, rhs)
}
} finally {
if (lhs != null && lhs.isInstanceOf[ColumnVector]) {
lhs.asInstanceOf[ColumnVector].close()
}
if (rhs != null && rhs.isInstanceOf[ColumnVector]) {
rhs.asInstanceOf[ColumnVector].close()
}
}
ret
}
override def withNewChildrenInternal(
newLeft: Expression, newRight: Expression): BrokenColumnarAdd =
new BrokenColumnarAdd(
left = newLeft.asInstanceOf[ColumnarExpression],
right = newRight.asInstanceOf[ColumnarExpression], failOnError)
}
class CannotReplaceException(str: String) extends RuntimeException(str) {
}
case class PreRuleReplaceAddWithBrokenVersion() extends Rule[SparkPlan] {
def replaceWithColumnarExpression(exp: Expression): ColumnarExpression = exp match {
case a: Alias =>
new ColumnarAlias(replaceWithColumnarExpression(a.child),
a.name)(a.exprId, a.qualifier, a.explicitMetadata, a.nonInheritableMetadataKeys)
case att: AttributeReference =>
new ColumnarAttributeReference(att.name, att.dataType, att.nullable,
att.metadata)(att.exprId, att.qualifier)
case lit: Literal =>
new ColumnarLiteral(lit.value, lit.dataType)
case add: Add if (add.dataType == LongType) &&
(add.left.dataType == LongType) &&
(add.right.dataType == LongType) =>
// Add only supports Longs for now.
new BrokenColumnarAdd(replaceWithColumnarExpression(add.left),
replaceWithColumnarExpression(add.right))
case exp =>
throw new CannotReplaceException(s"expression " +
s"${exp.getClass} ${exp} is not currently supported.")
}
def replaceWithColumnarPlan(plan: SparkPlan): SparkPlan =
try {
plan match {
case e: ShuffleExchangeExec =>
// note that this is not actually columnar but demonstrates that exchanges can
// be replaced.
val replaced = e.withNewChildren(e.children.map(replaceWithColumnarPlan))
MyShuffleExchangeExec(replaced.asInstanceOf[ShuffleExchangeExec])
case e: BroadcastExchangeExec =>
// note that this is not actually columnar but demonstrates that exchanges can
// be replaced.
val replaced = e.withNewChildren(e.children.map(replaceWithColumnarPlan))
MyBroadcastExchangeExec(replaced.asInstanceOf[BroadcastExchangeExec])
case plan: ProjectExec =>
new ColumnarProjectExec(plan.projectList.map((exp) =>
replaceWithColumnarExpression(exp).asInstanceOf[NamedExpression]),
replaceWithColumnarPlan(plan.child))
case p =>
logWarning(s"Columnar processing for ${p.getClass} is not currently supported.")
p.withNewChildren(p.children.map(replaceWithColumnarPlan))
}
} catch {
case exp: CannotReplaceException =>
logWarning(s"Columnar processing for ${plan.getClass} is not currently supported" +
s"because ${exp.getMessage}")
plan
}
override def apply(plan: SparkPlan): SparkPlan = replaceWithColumnarPlan(plan)
}
/**
* Custom Exchange used in tests to demonstrate that shuffles can be replaced regardless of
* whether AQE is enabled.
*/
case class MyShuffleExchangeExec(delegate: ShuffleExchangeExec) extends ShuffleExchangeLike {
override def numMappers: Int = delegate.numMappers
override def numPartitions: Int = delegate.numPartitions
override def shuffleOrigin: ShuffleOrigin = {
delegate.shuffleOrigin
}
override def mapOutputStatisticsFuture: Future[MapOutputStatistics] =
delegate.mapOutputStatisticsFuture
override def getShuffleRDD(partitionSpecs: Array[ShufflePartitionSpec]): RDD[_] =
delegate.getShuffleRDD(partitionSpecs)
override def runtimeStatistics: Statistics = delegate.runtimeStatistics
override def child: SparkPlan = delegate.child
override protected def doExecute(): RDD[InternalRow] = delegate.execute()
override def outputPartitioning: Partitioning = delegate.outputPartitioning
override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
super.legacyWithNewChildren(Seq(newChild))
}
/**
* Custom Exchange used in tests to demonstrate that broadcasts can be replaced regardless of
* whether AQE is enabled.
*/
case class MyBroadcastExchangeExec(delegate: BroadcastExchangeExec) extends BroadcastExchangeLike {
override def runId: UUID = delegate.runId
override def relationFuture: java.util.concurrent.Future[Broadcast[Any]] =
delegate.relationFuture
override def completionFuture: Future[Broadcast[Any]] = delegate.completionFuture
override def runtimeStatistics: Statistics = delegate.runtimeStatistics
override def child: SparkPlan = delegate.child
override protected def doPrepare(): Unit = delegate.prepare()
override protected def doExecute(): RDD[InternalRow] = delegate.execute()
override def doExecuteBroadcast[T](): Broadcast[T] = delegate.executeBroadcast()
override def outputPartitioning: Partitioning = delegate.outputPartitioning
override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
super.legacyWithNewChildren(Seq(newChild))
}
class ReplacedRowToColumnarExec(override val child: SparkPlan)
extends RowToColumnarExec(child) {
  // We have to override equals because subclassing a case class like RowToColumnarExec is not that
  // clean. One of the issues is that the generated equals will see ReplacedRowToColumnarExec and
  // RowToColumnarExec as being equal and this can result in the withNewChildren method not
  // actually replacing anything
override def equals(other: Any): Boolean = {
if (!super.equals(other)) {
return false
}
return other.isInstanceOf[ReplacedRowToColumnarExec]
}
override def hashCode(): Int = super.hashCode()
override def withNewChildInternal(newChild: SparkPlan): ReplacedRowToColumnarExec =
new ReplacedRowToColumnarExec(newChild)
}
case class MyPostRule() extends Rule[SparkPlan] {
override def apply(plan: SparkPlan): SparkPlan = plan match {
case rc: RowToColumnarExec => new ReplacedRowToColumnarExec(rc.child)
case plan => plan.withNewChildren(plan.children.map(apply))
}
}
case class MyColumnarRule(pre: Rule[SparkPlan], post: Rule[SparkPlan]) extends ColumnarRule {
override def preColumnarTransitions: Rule[SparkPlan] = pre
override def postColumnarTransitions: Rule[SparkPlan] = post
}
class MyExtensions extends (SparkSessionExtensions => Unit) {
def apply(e: SparkSessionExtensions): Unit = {
e.injectPlannerStrategy(MySparkStrategy)
e.injectResolutionRule(MyRule)
e.injectPostHocResolutionRule(MyRule)
e.injectCheckRule(MyCheckRule)
e.injectOptimizerRule(MyRule)
e.injectParser(MyParser)
e.injectFunction(MyExtensions.myFunction)
e.injectColumnar(session => MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule()))
}
}
object QueryPrepRuleHelper {
val myPrepTag: TreeNodeTag[String] = TreeNodeTag[String]("myPrepTag")
val myPrepTagValue: String = "myPrepTagValue"
}
// this rule will run during AQE query preparation and will write custom tags to each node
case class MyQueryStagePrepRule() extends Rule[SparkPlan] {
override def apply(plan: SparkPlan): SparkPlan = plan.transformDown {
case plan =>
plan.setTagValue(QueryPrepRuleHelper.myPrepTag, QueryPrepRuleHelper.myPrepTagValue)
plan
}
}
// this rule will run during AQE query stage optimization and will verify custom tags were
// already written during query preparation phase
case class MyNewQueryStageRule() extends Rule[SparkPlan] {
override def apply(plan: SparkPlan): SparkPlan = plan.transformDown {
case plan if !plan.isInstanceOf[AdaptiveSparkPlanExec] =>
assert(plan.getTagValue(QueryPrepRuleHelper.myPrepTag).get ==
QueryPrepRuleHelper.myPrepTagValue)
plan
}
}
case class MyRule2(spark: SparkSession) extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan
}
case class MyCheckRule2(spark: SparkSession) extends (LogicalPlan => Unit) {
override def apply(plan: LogicalPlan): Unit = { }
}
case class MySparkStrategy2(spark: SparkSession) extends SparkStrategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = Seq.empty
}
object MyExtensions2 {
val myFunction = (FunctionIdentifier("myFunction2"),
new ExpressionInfo(
"noClass",
"myDb",
"myFunction2",
"usage",
"extended usage",
" Examples:",
"""
note
""",
"",
"3.0.0",
"""
deprecated
""",
""),
(_: Seq[Expression]) => Literal(5, IntegerType))
}
class MyExtensions2 extends (SparkSessionExtensions => Unit) {
def apply(e: SparkSessionExtensions): Unit = {
e.injectPlannerStrategy(MySparkStrategy2)
e.injectResolutionRule(MyRule2)
e.injectPostHocResolutionRule(MyRule2)
e.injectCheckRule(MyCheckRule2)
e.injectOptimizerRule(MyRule2)
e.injectParser((_: SparkSession, _: ParserInterface) => CatalystSqlParser)
e.injectFunction(MyExtensions2.myFunction)
}
}
object MyExtensions2Duplicate {
val myFunction = (FunctionIdentifier("myFunction2"),
new ExpressionInfo(
"noClass",
"myDb",
"myFunction2",
"usage",
"extended usage",
" Examples:",
"""
note
""",
"",
"3.0.0",
"""
deprecated
""",
""),
(_: Seq[Expression]) => Literal(5, IntegerType))
}
class MyExtensions2Duplicate extends (SparkSessionExtensions => Unit) {
def apply(e: SparkSessionExtensions): Unit = {
e.injectFunction(MyExtensions2Duplicate.myFunction)
}
}
class YourExtensions extends SparkSessionExtensionsProvider {
val getAppName = (FunctionIdentifier("get_fake_app_name"),
new ExpressionInfo(
"zzz.zzz.zzz",
"",
"get_fake_app_name"),
(_: Seq[Expression]) => Literal("Fake App Name"))
override def apply(v1: SparkSessionExtensions): Unit = {
v1.injectFunction(getAppName)
}
}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala | Scala | apache-2.0 | 38,125 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.amaterasu.common.execution.actions
import org.apache.amaterasu.common.execution.actions.NotificationLevel.NotificationLevel
import org.apache.amaterasu.common.execution.actions.NotificationType.NotificationType
/**
* Created by roadan on 8/20/16.
*/
abstract class Notifier {
def info(msg: String)
def success(line: String)
def error(line: String, msg: String)
}
object NotificationType extends Enumeration {
type NotificationType = Value
val success = Value("success")
val error = Value("error")
val info = Value("info")
}
object NotificationLevel extends Enumeration {
type NotificationLevel = Value
val execution = Value("execution")
val code = Value("code")
val none = Value("none")
}
case class Notification(line: String,
msg: String,
notType: NotificationType,
notLevel: NotificationLevel)
| shintoio/amaterasu | common/src/main/scala/org/apache/ameterasu/common/execution/actions/Notifier.scala | Scala | apache-2.0 | 1,728 |
package scala.meta.tests
package contrib
import munit.FunSuite
import scala.meta._
import scala.meta.contrib._
class SetExtensionsTest extends FunSuite {
val typeFoo = t"Foo"
val termFoo = q"Foo"
val set = Set(typeFoo, termFoo, q"Foo")
test("Reference equality holds normally") {
assert(set.size == 3)
assert(set.contains(typeFoo))
assert(set.contains(termFoo))
assert(!set.contains(q"Foo"))
}
test("Structurally") {
val structuralSet = set.structurally
assert(structuralSet.size == 2)
assert(structuralSet.contains(typeFoo))
assert(structuralSet.contains(termFoo))
assert(structuralSet.contains(q"Foo"))
}
test("Syntactically") {
val syntacticSet = set.syntactically
assert(syntacticSet.size == 1)
assert(syntacticSet.contains(typeFoo))
assert(syntacticSet.contains(termFoo))
assert(syntacticSet.contains(q"Foo"))
}
}
| scalameta/scalameta | tests/shared/src/test/scala/scala/meta/tests/contrib/SetExtensionsTest.scala | Scala | bsd-3-clause | 899 |
package metrics
import akka.actor.Actor
import com.amazonaws.services.cloudwatch.AmazonCloudWatchClient
import com.amazonaws.services.cloudwatch.model.{MetricDatum, PutMetricDataRequest, StandardUnit, StatisticSet}
import org.slf4j.{Logger, LoggerFactory}
import scala.jdk.CollectionConverters._
import play.api.{Environment, Mode}
import com.gu.{AppIdentity, AwsIdentity}
trait MetricActorLogic {
private val logger: Logger = LoggerFactory.getLogger(this.getClass)
def cloudWatchClient: AmazonCloudWatchClient
def stage: String
def appName: String
private def aggregatePointsPerMetric(metricDataPoints: List[MetricDataPoint], metricName: String): MetricDatum = {
val (sum, min, max) = metricDataPoints.foldLeft((0d, Double.MaxValue, Double.MinValue)) { case ((aggSum, aggMin, aggMax), dataPoint) =>
(aggSum + dataPoint.value, aggMin.min(dataPoint.value), aggMax.max(dataPoint.value))
}
val stats = new StatisticSet
stats.setMaximum(max)
stats.setMinimum(min)
stats.setSum(sum)
stats.setSampleCount(metricDataPoints.size.toDouble)
val unit = metricDataPoints.headOption.map(_.unit).getOrElse(StandardUnit.None)
val metric = new MetricDatum()
metric.setMetricName(metricName)
metric.setUnit(unit)
metric.setStatisticValues(stats)
metric
}
private def aggregatePointsPerNamespaceBatches(points: List[MetricDataPoint]): List[(String, List[MetricDatum])] = {
val pointsPerMetric = points.groupBy { point => (point.namespace, point.name) }.toList
val allAwsMetrics = pointsPerMetric.map { case ((namespace, metricName), metricPoints) =>
namespace -> aggregatePointsPerMetric(metricPoints, metricName)
}
val metricsPerNamespace = allAwsMetrics.foldLeft(Map.empty[String, List[MetricDatum]]) {
case (agg, (namespace, awsPoint)) =>
val points = agg.getOrElse(namespace, Nil)
agg + (namespace -> (awsPoint :: points))
}
metricsPerNamespace.toList.flatMap { case (namespace, awsMetrics) =>
val awsMetricsBatches = awsMetrics.grouped(20)
awsMetricsBatches.map { batch =>
namespace -> batch
}
}
}
def aggregatePoints(points: List[MetricDataPoint]): Unit = {
if (points.isEmpty) {
logger.debug(s"No metric sent to cloudwatch.")
} else {
val metricsPerNamespaceBatches = aggregatePointsPerNamespaceBatches(points)
val metricsCount = metricsPerNamespaceBatches.foldLeft(0) { case (sum, (_, batch)) => sum + batch.size }
val batchesCount = metricsPerNamespaceBatches.size
val namespacesCount = metricsPerNamespaceBatches.map(_._1).toSet.size
try {
metricsPerNamespaceBatches.foreach { case (namespace, awsMetricBatch) =>
val metricRequest = new PutMetricDataRequest()
metricRequest.setNamespace(s"$namespace/$stage/$appName")
metricRequest.setMetricData(awsMetricBatch.asJava)
cloudWatchClient.putMetricData(metricRequest)
}
logger.info("Sent metrics to cloudwatch. " +
s"Data points: ${points.size}, " +
s"Metrics: $metricsCount, " +
s"Namespaces: $namespacesCount, " +
s"Batches: $batchesCount")
} catch {
case e: Exception => logger.error(s"Unable to send metrics to cloudwatch", e)
}
}
}
}
class MetricActor(val cloudWatchClient: AmazonCloudWatchClient, val identity: AppIdentity, val env: Environment) extends Actor with MetricActorLogic {
var dataPoints = List.empty[MetricDataPoint]
override def stage: String = identity match {
case AwsIdentity(_, _, stage, _) => stage
case _ => "DEV"
}
override def appName: String = identity match {
case AwsIdentity(app, _, _, _) => app
case _ => "DEV"
}
override def receive: Receive = {
case metricDataPoint: MetricDataPoint if env.mode != Mode.Test =>
dataPoints = metricDataPoint :: dataPoints
case MetricActor.Aggregate =>
aggregatePoints(dataPoints)
dataPoints = Nil
}
}
object MetricActor {
case object Aggregate
}
| guardian/mobile-n10n | common/src/main/scala/metrics/MetricActor.scala | Scala | apache-2.0 | 4,062 |
package org.jetbrains.sbt
package project.template
import java.awt.FlowLayout
import java.io.File
import javax.swing.border.EmptyBorder
import javax.swing._
import com.intellij.ide.util.projectWizard.{ModuleBuilder, ModuleWizardStep, SdkSettingsStep, SettingsStep}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.externalSystem.importing.ImportSpecBuilder
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode
import com.intellij.openapi.externalSystem.service.project.wizard.AbstractExternalModuleBuilder
import com.intellij.openapi.externalSystem.settings.{AbstractExternalSystemSettings, ExternalSystemSettingsListener}
import com.intellij.openapi.externalSystem.util.{ExternalSystemApiUtil, ExternalSystemUtil}
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.module.{JavaModuleType, ModifiableModuleModel, Module, ModuleType}
import com.intellij.openapi.options.ConfigurationException
import com.intellij.openapi.projectRoots.{JavaSdk, JavaSdkVersion, SdkTypeId}
import com.intellij.openapi.roots.ModifiableRootModel
import com.intellij.openapi.util.Condition
import com.intellij.openapi.util.io.FileUtil._
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.LocalFileSystem
import org.jetbrains.plugins.scala.extensions.JComponentExt.ActionListenersOwner
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.project.Platform.{Dotty, Scala}
import org.jetbrains.plugins.scala.project.{Platform, Version, Versions}
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.settings.SbtProjectSettings
import scala.collection.mutable
/**
* User: Dmitry Naydanov, Pavel Fatin
* Date: 11/23/13
*/
class SbtModuleBuilder extends AbstractExternalModuleBuilder[SbtProjectSettings](SbtProjectSystem.Id, new SbtProjectSettings) {
private class Selections(var sbtVersion: String = null,
var scalaPlatform: Platform = Platform.Default,
var scalaVersion: String = null,
var resolveClassifiers: Boolean = true,
var resolveSbtClassifiers: Boolean = false)
private val selections = new Selections()
private lazy val sbtVersions: Array[String] = withProgressSynchronously("Fetching sbt versions") { _ =>
Versions.loadSbtVersions
}
private val scalaVersions: mutable.Map[Platform, Array[String]] = mutable.Map.empty
private def loadedScalaVersions(platform: Platform) = scalaVersions.getOrElseUpdate(platform, {
withProgressSynchronously(s"Fetching ${platform.name} versions") { _ =>
Versions.loadScalaVersions(platform)
}
})
getExternalProjectSettings.setResolveJavadocs(false)
getExternalProjectSettings.setUseAutoImport(false)
getExternalProjectSettings.setCreateEmptyContentRootDirectories(false)
def getModuleType: ModuleType[_ <: ModuleBuilder] = JavaModuleType.getModuleType
override def createModule(moduleModel: ModifiableModuleModel): Module = {
val root = getModuleFileDirectory.toFile
if (root.exists) {
import selections._
getExternalProjectSettings.setResolveClassifiers(resolveClassifiers)
getExternalProjectSettings.setResolveSbtClassifiers(resolveSbtClassifiers)
createProjectTemplateIn(root, getName, scalaPlatform, scalaVersion, sbtVersion)
updateModulePath()
}
super.createModule(moduleModel)
}
// TODO customize the path in UI when IDEA-122951 will be implemented
private def updateModulePath() {
val file = getModuleFilePath.toFile
val path = file.getParent + "/" + Sbt.ModulesDirectory + "/" + file.getName.toLowerCase
setModuleFilePath(path)
}
override def modifySettingsStep(settingsStep: SettingsStep): ModuleWizardStep = {
setupDefaultVersions()
val sbtVersionComboBox = applyTo(new SComboBox())(
_.setItems(sbtVersions),
_.setSelectedItem(selections.sbtVersion)
)
val scalaPlatformComboBox = applyTo(new SComboBox()) {
_.setItems(Platform.Values)
}
val scalaVersionComboBox = applyTo(new SComboBox())(
setupScalaVersionItems
)
val step = sdkSettingsStep(settingsStep)
val resolveClassifiersCheckBox: JCheckBox = applyTo(new JCheckBox(SbtBundle("sbt.settings.sources")))(
_.setToolTipText("Download Scala standard library sources (useful for editing the source code)"),
_.setSelected(selections.resolveClassifiers)
)
val resolveSbtClassifiersCheckBox = applyTo(new JCheckBox(SbtBundle("sbt.settings.sources")))(
_.setToolTipText("Download sbt sources (useful for editing the project definition)"),
_.setSelected(selections.resolveSbtClassifiers)
)
sbtVersionComboBox.addActionListenerEx {
selections.sbtVersion = sbtVersionComboBox.getSelectedItem.asInstanceOf[String]
}
scalaPlatformComboBox.addActionListenerEx {
selections.scalaPlatform = scalaPlatformComboBox.getSelectedItem.asInstanceOf[Platform]
setupScalaVersionItems(scalaVersionComboBox)
}
scalaVersionComboBox.addActionListenerEx {
selections.scalaVersion = scalaVersionComboBox.getSelectedItem.asInstanceOf[String]
}
resolveClassifiersCheckBox.addActionListenerEx {
selections.resolveClassifiers = resolveClassifiersCheckBox.isSelected
}
resolveSbtClassifiersCheckBox.addActionListenerEx {
selections.resolveSbtClassifiers = resolveSbtClassifiersCheckBox.isSelected
}
val sbtVersionPanel = applyTo(new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0)))(
_.add(sbtVersionComboBox),
_.add(resolveSbtClassifiersCheckBox)
)
val scalaVersionPanel = applyTo(new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0)))(
_.setBorder(new EmptyBorder(1, 0, 0, 0)),
_.add(scalaPlatformComboBox),
_.add(Box.createHorizontalStrut(4)),
_.add(scalaVersionComboBox),
_.add(resolveClassifiersCheckBox)
)
settingsStep.addSettingsField(SbtBundle("sbt.settings.sbt"), sbtVersionPanel)
settingsStep.addSettingsField(SbtBundle("sbt.settings.scala"), scalaVersionPanel)
// TODO Remove the label patching when the External System will use the concise and proper labels natively
Option(sbtVersionPanel.getParent).foreach { parent =>
parent.getComponents.toSeq.foreachDefined {
case label: JLabel if label.getText == "Project SDK:" =>
label.setText("JDK:")
label.setDisplayedMnemonic('J')
case label: JLabel if label.getText.startsWith("Project ") && label.getText.length > 8 =>
label.setText(label.getText.substring(8) |> (s => s.substring(0, 1).toUpperCase + s.substring(1)))
}
}
step
}
private def createProjectTemplateIn(root: File, name: String, platform: Platform, scalaVersion: String, sbtVersion: String) {
val buildFile = root / Sbt.BuildFile
val projectDir = root / Sbt.ProjectDirectory
val propertiesFile = projectDir / Sbt.PropertiesFile
val pluginsFile = projectDir / Sbt.PluginsFile
if (!buildFile.createNewFile() ||
!projectDir.mkdir()) return
(root / "src" / "main" / "scala").mkdirs()
(root / "src" / "test" / "scala").mkdirs()
writeToFile(buildFile, SbtModuleBuilder.formatProjectDefinition(name, platform, scalaVersion))
writeToFile(propertiesFile, SbtModuleBuilder.formatSbtProperties(sbtVersion))
SbtModuleBuilder.formatSbtPlugins(platform) match {
case "" =>
case content =>
writeToFile(pluginsFile, content)
}
}
private def sdkSettingsStep(settingsStep: SettingsStep): SdkSettingsStep = {
val filter = new Condition[SdkTypeId] {
def value(t: SdkTypeId): Boolean = t != null && t.isInstanceOf[JavaSdk]
}
new SdkSettingsStep(settingsStep, this, filter) {
override def updateDataModel() {
settingsStep.getContext setProjectJdk myJdkComboBox.getSelectedJdk
}
override def validate(): Boolean = {
if (!super.validate()) return false
val selectedSdk = myJdkComboBox.getSelectedJdk
def isJava8 = JavaSdk.getInstance().getVersion(selectedSdk).isAtLeast(JavaSdkVersion.JDK_1_8)
val scalaVersion = selections.scalaVersion
if (scalaVersion == null || selectedSdk == null) true
else {
val selectedVersion = Version(scalaVersion)
val needJdk8 = selectedVersion >= Version("2.12") && !isJava8
if (needJdk8) {
throw new ConfigurationException("Scala 2.12 requires JDK 1.8", "Wrong JDK version")
}
else true
}
}
}
}
private def setupScalaVersionItems(cbx: SComboBox): Unit = {
val platform = selections.scalaPlatform
val loadedVersions = loadedScalaVersions(platform)
cbx.setItems(loadedVersions)
if (loadedVersions.contains(selections.scalaVersion)) {
cbx.setSelectedItem(selections.scalaVersion)
} else {
cbx.setSelectedIndex(0)
}
}
private def setupDefaultVersions(): Unit = {
if (selections.sbtVersion == null) {
selections.sbtVersion = sbtVersions.headOption.getOrElse(Versions.DefaultSbtVersion)
}
if (selections.scalaVersion == null) {
selections.scalaVersion = loadedScalaVersions(selections.scalaPlatform).headOption.getOrElse {
selections.scalaPlatform match {
case Dotty => Versions.DefaultDottyVersion
case Scala => Versions.DefaultScalaVersion
}
}
}
}
override def getNodeIcon: Icon = Sbt.Icon
override def setupRootModel(model: ModifiableRootModel) {
val contentPath = getContentEntryPath
if (StringUtil.isEmpty(contentPath)) return
val contentRootDir = contentPath.toFile
createDirectory(contentRootDir)
val fileSystem = LocalFileSystem.getInstance
val vContentRootDir = fileSystem.refreshAndFindFileByIoFile(contentRootDir)
if (vContentRootDir == null) return
model.addContentEntry(vContentRootDir)
model.inheritSdk()
val settings =
ExternalSystemApiUtil.getSettings(model.getProject, SbtProjectSystem.Id).
asInstanceOf[AbstractExternalSystemSettings[_ <: AbstractExternalSystemSettings[_, SbtProjectSettings, _],
SbtProjectSettings, _ <: ExternalSystemSettingsListener[SbtProjectSettings]]]
val externalProjectSettings = getExternalProjectSettings
externalProjectSettings.setExternalProjectPath(getContentEntryPath)
settings.linkProject(externalProjectSettings)
if (!externalProjectSettings.isUseAutoImport) {
FileDocumentManager.getInstance.saveAllDocuments()
ApplicationManager.getApplication.invokeLater(() => ExternalSystemUtil.refreshProjects(
new ImportSpecBuilder(model.getProject, SbtProjectSystem.Id)
.forceWhenUptodate()
.use(ProgressExecutionMode.IN_BACKGROUND_ASYNC)
))
}
}
}
private object SbtModuleBuilder {
def formatProjectDefinition(name: String, platform: Platform, scalaVersion: String): String = platform match {
case Scala =>
s"""name := "$name"
|
|version := "0.1"
|
|scalaVersion := "$scalaVersion"
""".stripMargin.trim
case Dotty =>
s"""
|name := "dotty-example-project"
|description := "Example sbt project that compiles using Dotty"
|version := "0.1"
|
|scalaVersion := "$scalaVersion"
""".stripMargin.trim
}
def formatSbtPlugins(platform: Platform): String = platform match {
case Dotty => """addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.1.4")"""
case Scala => s""
}
def formatSbtProperties(sbtVersion: String) = s"sbt.version = $sbtVersion"
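  // A rough sketch of the generated files, assuming a hypothetical project named "demo" on the
  // Scala platform with scalaVersion "2.12.3" and sbtVersion "0.13.16" (file names follow the
  // usual sbt layout; the exact names come from the Sbt object, which is not shown here):
  //
  //   build.sbt:                  name := "demo"
  //                               version := "0.1"
  //                               scalaVersion := "2.12.3"
  //   project/build.properties:   sbt.version = 0.13.16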
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/project/template/SbtModuleBuilder.scala | Scala | apache-2.0 | 11,784 |
package renesca.graph
import renesca.parameter.{PropertyKey, PropertyValue}
sealed trait GraphChange {
def isValid: Boolean
}
sealed trait GraphItemChange extends GraphChange {
val item: Item
}
sealed trait GraphPathChange extends GraphChange {
val path: Path
def isValid = (path.relations ++ path.nodes).forall(_.origin.kind == path.origin.kind)
}
sealed trait GraphContentChange extends GraphItemChange {
require(isValid, "GraphContentChanges can only be applied to non-local items")
def isValid = !item.origin.isLocal
}
case class SetProperty(item: Item, key: PropertyKey, value: PropertyValue) extends GraphContentChange
case class RemoveProperty(item: Item, key: PropertyKey) extends GraphContentChange
case class SetLabel(item: Node, label: Label) extends GraphContentChange
case class RemoveLabel(item: Node, label: Label) extends GraphContentChange
//TODO: rename to RemoveItem?
case class DeleteItem(item: Item) extends GraphItemChange {
def isValid = true
}
sealed trait GraphStructureChange extends GraphChange
case class AddItem(item: Item) extends GraphStructureChange with GraphItemChange {
require(isValid, "AddItem changes can only be applied to local items")
def isValid = item.origin.isLocal
}
case class AddPath(path: Path) extends GraphStructureChange with GraphPathChange {
require(isValid, "AddPath changes can only be applied to local paths")
}
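// A minimal sketch of the validity contract above, assuming hypothetical `localNode` (not yet
// persisted), `persistedNode` (already stored), `key` and `value` values:
//
//   AddItem(localNode)                      // ok: only local items may be added
//   SetProperty(persistedNode, key, value)  // ok: content changes need a non-local item
//   SetProperty(localNode, key, value)      // fails the require(...) with IllegalArgumentException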
| renesca/renesca | shared/src/main/scala/renesca/graph/GraphChange.scala | Scala | apache-2.0 | 1,404 |
package ch.bsisa.hyperbird.db.evolution
import play.api.Logger
import play.api.libs.concurrent.Execution.Implicits._
import play.api.libs.ws.Response
import play.api.libs.ws.WS
import scala.concurrent.Future
import ch.bsisa.hyperbird.dao.ws.WSQueries
import ch.bsisa.hyperbird.Implicits._
import ch.bsisa.hyperbird.model.format.ElfinFormat
import ch.bsisa.hyperbird.dao.ElfinDAO
object Version4To5 {
def fontainesStructUpdate() = {
val fontainesCollectionId = "G20040930101030004"
val xpath = "//ELFIN[@CLASSE='FONTAINE']"
val query = WSQueries.filteredCollectionQuery(fontainesCollectionId, xpath)
// Perform call to eXist REST service to get collections list
val responseFuture: Future[Response] = WS.url(query).get()
// asynchronous call
responseFuture.map { resp =>
// We expect to receive XML content
Logger.debug(s"Result of type ${resp.ahcResponse.getContentType} received")
// Parse XML (Need to wrap the list of XML elements received to obtain valid XML.)
val melfinElem = scala.xml.XML.loadString("<MELFIN>" + resp.body.mkString + "</MELFIN>")
// elfinsFromXml unwraps ELFINS from the MELFIN element to return a Seq[ELFIN]
// Unwrap wrap tag (should be MELFIN)
val elfinNodeSeq = melfinElem \\ "ELFIN"
      // TODO: Perform conversion: copy all elements, enforcing the top-level element order
val elfins = for { elfinNode <- elfinNodeSeq } yield {
/*
<xs:element ref="MUTATIONS" minOccurs="0" maxOccurs="1"></xs:element>
<xs:element ref="GEOSELECTION" minOccurs="0" maxOccurs="1"/>
<xs:element ref="IDENTIFIANT" minOccurs="0" maxOccurs="1"/>
<xs:element ref="CARACTERISTIQUE" minOccurs="0" maxOccurs="1"/>
<xs:element ref="PARTENAIRE" minOccurs="0" maxOccurs="1"/>
<xs:element ref="ACTIVITE" minOccurs="0" maxOccurs="1"/>
<xs:element ref="FORME" minOccurs="0" maxOccurs="1"/>
<xs:element ref="ANNEXE" minOccurs="0" maxOccurs="1"/>
<xs:element ref="DIVERS" minOccurs="0" maxOccurs="1"/>
*/
val newElfinNode =
<ELFIN>
{ elfinNode \ "MUTATIONS" }
{ elfinNode \ "GEOSELECTION" }
{ elfinNode \ "IDENTIFIANT" }
{ elfinNode \ "CARACTERISTIQUE" }
{ elfinNode \ "PARTENAIRE" }
{ elfinNode \ "ACTIVITE" }
{ elfinNode \ "FORME" }
{ elfinNode \ "ANNEXE" }
{ elfinNode \ "DIVERS" }
</ELFIN>.%(elfinNode.attributes)
//val newElfinNode = <ELFIN Id={elfinNode \ "@Id"} ID_G={elfinNode \ "@ID_G"} CLASSE={elfinNode \ "@CLASSE"}></ELFIN>
newElfinNode
}
Logger.debug(s"Found ${elfins.size} FONTAINES...")
for (elfin <- elfins) {
ElfinDAO.update(elfin)
Logger.debug(s"elfin: ${elfin}")
}
}
}
} | bsisa/hb-api | app/ch/bsisa/hyperbird/db/evolution/Version4To5.scala | Scala | gpl-2.0 | 2,914 |
package shared
/**
* Contains the name of a file and its size in bytes.
* <p>
* Created by Matthias Braun on 9/4/2016.
*/
case class FileData(name: String, size: Long)
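// A minimal usage sketch; the file name and size below are made-up values:
//
//   val report = FileData("report.pdf", 2048L)
//   println(s"${report.name} is ${report.size} bytes")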
| mb720/scalajsClientServer | app/shared/src/main/scala/shared/FileData.scala | Scala | bsd-2-clause | 187 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.project.view.bug
import com.esofthead.mycollab.common.UrlTokenizer
import com.esofthead.mycollab.core.arguments.NumberSearchField
import com.esofthead.mycollab.core.{MyCollabException, ResourceNotFoundException}
import com.esofthead.mycollab.eventmanager.EventBusFactory
import com.esofthead.mycollab.module.project.ProjectLinkParams
import com.esofthead.mycollab.module.project.events.ProjectEvent
import com.esofthead.mycollab.module.project.view.ProjectUrlResolver
import com.esofthead.mycollab.module.project.view.parameters.{BugFilterParameter, VersionScreenData, BugScreenData, ProjectScreenData}
import com.esofthead.mycollab.module.tracker.domain.SimpleBug
import com.esofthead.mycollab.module.tracker.domain.criteria.{BugSearchCriteria, VersionSearchCriteria}
import com.esofthead.mycollab.module.tracker.service.BugService
import com.esofthead.mycollab.spring.ApplicationContextUtil
import com.esofthead.mycollab.vaadin.AppContext
import com.esofthead.mycollab.vaadin.mvp.PageActionChain
/**
* @author MyCollab Ltd
* @since 5.0.9
*/
class BugUrlResolver extends ProjectUrlResolver {
this.defaultUrlResolver = new DefaultUrlResolver
this.addSubResolver("dashboard", new DefaultUrlResolver)
this.addSubResolver("add", new AddUrlResolver)
this.addSubResolver("list", new ListUrlResolver)
this.addSubResolver("edit", new EditUrlResolver)
this.addSubResolver("preview", new PreviewUrlResolver)
this.addSubResolver("component", new ComponentUrlResolver)
this.addSubResolver("version", new VersionUrlResolver)
private class DefaultUrlResolver extends ProjectUrlResolver {
protected override def handlePage(params: String*) {
val projectId = new UrlTokenizer(params(0)).getInt
val chain = new PageActionChain(new ProjectScreenData.Goto(projectId),
new BugScreenData.GotoDashboard)
EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain))
}
}
private class ListUrlResolver extends ProjectUrlResolver {
protected override def handlePage(params: String*) {
val projectId = new UrlTokenizer(params(0)).getInt
val bugSearchCriteria = new BugSearchCriteria
bugSearchCriteria.setProjectId(new NumberSearchField(projectId))
val chain = new PageActionChain(new ProjectScreenData.Goto(projectId),
new BugScreenData.Search(new BugFilterParameter("List", bugSearchCriteria)))
EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain))
}
}
private class PreviewUrlResolver extends ProjectUrlResolver {
protected override def handlePage(params: String*) {
var projectId: Integer = 0
var bugId: Integer = 0
if (ProjectLinkParams.isValidParam(params(0))) {
val prjShortName = ProjectLinkParams.getProjectShortName(params(0))
val itemKey = ProjectLinkParams.getItemKey(params(0))
val bugService = ApplicationContextUtil.getSpringBean(classOf[BugService])
val bug = bugService.findByProjectAndBugKey(itemKey, prjShortName, AppContext.getAccountId)
if (bug != null) {
projectId = bug.getProjectid
bugId = bug.getId
}
else {
throw new ResourceNotFoundException("Can not get bug with bugkey %d and project short name %s".format(itemKey, prjShortName))
}
}
else {
throw new MyCollabException("Invalid bug link " + params(0))
}
val chain = new PageActionChain(new ProjectScreenData.Goto(projectId), new BugScreenData.Read(bugId))
EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain))
}
}
private class EditUrlResolver extends ProjectUrlResolver {
protected override def handlePage(params: String*) {
var bug: SimpleBug = null
if (ProjectLinkParams.isValidParam(params(0))) {
val prjShortName = ProjectLinkParams.getProjectShortName(params(0))
val itemKey = ProjectLinkParams.getItemKey(params(0))
val bugService = ApplicationContextUtil.getSpringBean(classOf[BugService])
bug = bugService.findByProjectAndBugKey(itemKey, prjShortName, AppContext.getAccountId)
}
else {
throw new MyCollabException("Invalid bug link: " + params(0))
}
if (bug == null) {
throw new ResourceNotFoundException
}
val chain = new PageActionChain(new ProjectScreenData.Goto(bug.getProjectid), new BugScreenData.Edit(bug))
EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain))
}
}
private class AddUrlResolver extends ProjectUrlResolver {
protected override def handlePage(params: String*) {
val projectId = new UrlTokenizer(params(0)).getInt
val chain = new PageActionChain(new ProjectScreenData.Goto(projectId), new BugScreenData.Add(new SimpleBug))
EventBusFactory.getInstance.post(new ProjectEvent.GotoMyProject(this, chain))
}
}
}
| uniteddiversity/mycollab | mycollab-web/src/main/scala/com/esofthead/mycollab/module/project/view/bug/BugUrlResolver.scala | Scala | agpl-3.0 | 6,040 |
/*
* This file is part of the \\BlueLaTeX project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.blue
package common
package impl
import couch.impl._
import http.impl._
import org.osgi.framework._
import org.osgi.service.log.LogService
import akka.actor.ActorSystem
import akka.osgi.ActorSystemActivator
import akka.util._
import scala.concurrent.duration._
import org.slf4j.LoggerFactory
import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.joran.JoranConfigurator
import java.io.File
import java.util.concurrent.TimeUnit
import com.typesafe.config._
import gnieh.sohva.control.CouchClient
import gnieh.sohva.control.entities.EntityManager
import gnieh.sohva.JsonSerializer
/** Register the configuration loader service that is used by everybody
*
* @author Lucas Satabin
*/
class BlueCommonActivator extends ActorSystemActivator {
import FileUtils._
import OsgiUtils._
private var dbManager: Option[DbManager] = None
private var couch: Option[CouchClient] = None
private var templates: Option[Templates] = None
private var server: Option[BlueServer] = None
def configure(context: BundleContext, system: ActorSystem): Unit = {
val configBase = new File(context.getProperty("blue.configuration.base"))
// the bundle configuration loader server
val loader = new ConfigurationLoaderImpl(context.getBundle.getSymbolicName, configBase)
// register it
context.registerService(classOf[ConfigurationLoader], loader, null)
// configure the logging framework
val loggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
try {
val configurator = new JoranConfigurator
configurator.setContext(loggerContext)
configurator.doConfigure(configBase / "logback.xml")
} catch {
case e: Exception =>
// TODO what to do?
e.printStackTrace
}
context.registerService(classOf[LogService].getName, new LogServiceFactory, null)
for(logger <- context.get[Logger]) try {
val config = loader.load(context.getBundle)
// register the couch client service
val client = couch(system, config)
context.registerService(classOf[CouchClient], client, null)
// create the database, etc...
val couchConfig = new CouchConfiguration(config)
dbManager = Some(new DbManager(client, couchConfig, logger))
dbManager.foreach(_.start())
// force the creation of design documents for entities if they don't exist
couchConfig.asAdmin(client) { session =>
new EntityManager(session.database(couchConfig.database("blue_papers"))).entities("")
new EntityManager(session.database(couchConfig.database("blue_users"))).entities("")
}
val configuration = new BlueConfiguration(config)
// register the mail agent client
val mailAgent = new MailAgentImpl(client, configuration, logger)
context.registerService(classOf[MailAgent], mailAgent, null)
// register the recaptcha service
val recaptcha = new ReCaptchaUtilImpl(configuration)
context.registerService(classOf[ReCaptcha], recaptcha, null)
// create and start the http server
server = Some(new BlueServer(context, system, config, logger))
server.foreach(_.start)
templates = Some(new TemplatesImpl(configuration))
context.registerService(classOf[Templates], templates.get, null)
} catch {
case e: Exception =>
logger.log(LogService.LOG_ERROR, s"Unable to start the core bundle", e)
throw e
}
// register the actor system as service so that other bundle can use it
registerService(context, system)
}
override def stop(context: BundleContext): Unit = {
// stop the server
server.foreach(_.stop)
server = None
// stop the template engine
templates = None
// stop the framework
context.getBundle(0).stop
}
private def couch(system: ActorSystem, config: Config): CouchClient = {
val hostname = config.getString("couch.hostname")
val port = config.getInt("couch.port")
val ssl = config.getBoolean("couch.ssl")
val timeout = Timeout(config.getDuration("couch.timeout", TimeUnit.SECONDS).seconds)
val c = new CouchClient(host = hostname, port = port, ssl = ssl, custom = List(BluePermissionSerializer))(system, timeout)
couch = Some(c)
c
}
}
| tdurieux/bluelatex | blue-common/src/main/scala/gnieh/blue/common/impl/BlueCommonActivator.scala | Scala | apache-2.0 | 4,891 |
package djinni
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
class ObjcMarshal(spec: Spec) extends Marshal(spec) {
val constructProxy = "ConstructProxy"
val constructProxyHeader = headerName(constructProxy)
val constructProxyObjc = idObjc.ty(constructProxy) + "." + spec.objcppExt
override def typename(tm: MExpr): String = {
val (name, _) = toObjcType(tm)
name
}
override def typename(name: String): String = idObjc.ty(name)
def typename(name: String, ty: TypeDef): String = idObjc.ty(name)
override def fqTypename(tm: MExpr): String = typename(tm)
def fqTypename(name: String, ty: TypeDef): String = typename(name, ty)
def nullability(tm: MExpr): Option[String] = {
val nonnull = Some("nonnull")
val nullable = Some("nullable")
val interfaceNullity = if (spec.cppNnType.nonEmpty) nonnull else nullable
tm.base match {
case MOptional => nullable
case MPrimitive(_,_,_,_,_,_,_,_) => None
case d: MDef => d.defType match {
case DEnum => None
case DInterface => interfaceNullity
case DRecord => nonnull
}
case e: MExtern => e.defType match {
case DEnum => None
case DInterface => interfaceNullity
case DRecord => if(e.objc.pointer) nonnull else None
}
case _ => nonnull
}
}
override def paramType(tm: MExpr): String = {
nullability(tm).fold("")(_ + " ") + toObjcParamType(tm)
}
override def fqParamType(tm: MExpr): String = paramType(tm)
override def returnType(ret: Option[TypeRef]): String = ret.fold("void")((t: TypeRef) => nullability(t.resolved).fold("")(_ + " ") + toObjcParamType(t.resolved))
override def fqReturnType(ret: Option[TypeRef]): String = returnType(ret)
override def fieldType(tm: MExpr): String = toObjcParamType(tm)
override def fqFieldType(tm: MExpr): String = toObjcParamType(tm)
override def toCpp(tm: MExpr, expr: String): String = throw new AssertionError("direct objc to cpp conversion not possible")
override def fromCpp(tm: MExpr, expr: String): String = throw new AssertionError("direct cpp to objc conversion not possible")
def references(m: Meta, exclude: String = ""): Seq[SymbolReference] = m match {
case o: MOpaque =>
List(ImportRef("<Foundation/Foundation.h>"))
case d: MDef => d.defType match {
case DEnum =>
List(ImportRef(include(d.name)))
case DInterface =>
val ext = d.body.asInstanceOf[Interface].ext
if (!ext.objc) {
List(ImportRef("<Foundation/Foundation.h>"), DeclRef(s"@class ${typename(d.name, d.body)};", None))
}
else {
List(ImportRef("<Foundation/Foundation.h>"), DeclRef(s"@protocol ${typename(d.name, d.body)};", None))
}
case DRecord =>
val r = d.body.asInstanceOf[Record]
val prefix = if (r.ext.objc) spec.objcExtendedRecordIncludePrefix else spec.objcIncludePrefix
List(ImportRef(q(prefix + headerName(d.name))))
}
case e: MExtern => List(ImportRef(e.objc.header))
case p: MParam => List()
}
def headerName(ident: String) = idObjc.ty(ident) + "." + spec.objcHeaderExt
def include(ident: String) = q(spec.objcIncludePrefix + headerName(ident))
def isPointer(td: TypeDecl) = td.body match {
case i: Interface => true
case r: Record => true
case e: Enum => false
}
def boxedTypename(td: TypeDecl) = td.body match {
case i: Interface => typename(td.ident, i)
case r: Record => typename(td.ident, r)
case e: Enum => "NSNumber"
}
// Return value: (Type_Name, Is_Class_Or_Not)
def toObjcType(ty: TypeRef): (String, Boolean) = toObjcType(ty.resolved, false)
def toObjcType(ty: TypeRef, needRef: Boolean): (String, Boolean) = toObjcType(ty.resolved, needRef)
def toObjcType(tm: MExpr): (String, Boolean) = toObjcType(tm, false)
def toObjcType(tm: MExpr, needRef: Boolean): (String, Boolean) = {
def args(tm: MExpr) = if (tm.args.isEmpty) "" else tm.args.map(toBoxedParamType).mkString("<", ", ", ">")
def f(tm: MExpr, needRef: Boolean): (String, Boolean) = {
tm.base match {
case MOptional =>
// We use "nil" for the empty optional.
assert(tm.args.size == 1)
val arg = tm.args.head
arg.base match {
case MOptional => throw new AssertionError("nested optional?")
case m => f(arg, true)
}
case o =>
val base = o match {
case p: MPrimitive => if (needRef) (p.objcBoxed, true) else (p.objcName, false)
case MString => ("NSString", true)
case MDate => ("NSDate", true)
case MBinary => ("NSData", true)
case MOptional => throw new AssertionError("optional should have been special cased")
case MList => ("NSArray" + args(tm), true)
case MSet => ("NSSet" + args(tm), true)
case MMap => ("NSDictionary" + args(tm), true)
case d: MDef => d.defType match {
case DEnum => if (needRef) ("NSNumber", true) else (idObjc.ty(d.name), false)
case DRecord => (idObjc.ty(d.name), true)
case DInterface =>
val ext = d.body.asInstanceOf[Interface].ext
if (!ext.objc)
(idObjc.ty(d.name), true)
else
(s"id<${idObjc.ty(d.name)}>", false)
}
case e: MExtern => e.body match {
case i: Interface => if(i.ext.objc) (s"id<${e.objc.typename}>", false) else (e.objc.typename, true)
case _ => if(needRef) (e.objc.boxed, true) else (e.objc.typename, e.objc.pointer)
}
case p: MParam => throw new AssertionError("Parameter should not happen at Obj-C top level")
}
base
}
}
f(tm, needRef)
}
def toBoxedParamType(tm: MExpr): String = {
val (name, needRef) = toObjcType(tm, true)
name + (if(needRef) " *" else "")
}
def toObjcParamType(tm: MExpr): String = {
val (name, needRef) = toObjcType(tm)
name + (if(needRef) " *" else "")
}
/**
   * This method returns whether we can use a global variable to represent a given constant.
   *
   * We can use global variables for constants which are safe to create during static init: numbers,
   * strings, and optional strings. Anything else needs to be a class method.
*/
def canBeConstVariable(c:Const): Boolean = c.ty.resolved.base match {
case MPrimitive(_,_,_,_,_,_,_,_) => true
case MString => true
case MOptional =>
assert(c.ty.resolved.args.size == 1)
val arg = c.ty.resolved.args.head
arg.base match {
case MString => true
case _ => false
}
case _ => false
}
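  // A rough sketch of how that rule plays out, using hypothetical IDL constants (the syntax is
  // approximate and not taken from this file):
  //
  //   const pi: f64 = 3.14            // number -> emitted as a global constant
  //   const name: string = "djinni"   // string -> emitted as a global constant
  //   const origin: point = { ... }   // record -> emitted as a class method instead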
def objcProxyConstructFuncName(name: String) = {
s"create${idObjc.ty(name)}"
}
}
| ragnraok/djinni | src/source/ObjcMarshal.scala | Scala | apache-2.0 | 6,866 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.calcite
import java.util
import com.google.common.collect.ImmutableList
import org.apache.calcite.jdbc.CalciteSchema
import org.apache.calcite.plan.RelOptTable.ViewExpander
import org.apache.calcite.plan._
import org.apache.calcite.prepare.CalciteCatalogReader
import org.apache.calcite.rel.RelRoot
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rex.RexBuilder
import org.apache.calcite.schema.SchemaPlus
import org.apache.calcite.sql.parser.{SqlParser, SqlParseException => CSqlParseException}
import org.apache.calcite.sql.validate.SqlValidator
import org.apache.calcite.sql.{SqlNode, SqlOperatorTable}
import org.apache.calcite.sql2rel.{RelDecorrelator, SqlRexConvertletTable, SqlToRelConverter}
import org.apache.calcite.tools.{FrameworkConfig, RelConversionException}
import org.apache.flink.table.api.{SqlParserException, TableException, ValidationException}
import scala.collection.JavaConversions._
/**
* NOTE: this is heavily inspired by Calcite's PlannerImpl.
* We need it in order to share the planner between the Table API relational plans
 * and the SQL relational plans that are created by the Calcite parser.
* The main difference is that we do not create a new RelOptPlanner in the ready() method.
*/
class FlinkPlannerImpl(
config: FrameworkConfig,
planner: RelOptPlanner,
typeFactory: FlinkTypeFactory) {
val operatorTable: SqlOperatorTable = config.getOperatorTable
/** Holds the trait definitions to be registered with planner. May be null. */
val traitDefs: ImmutableList[RelTraitDef[_ <: RelTrait]] = config.getTraitDefs
val parserConfig: SqlParser.Config = config.getParserConfig
val convertletTable: SqlRexConvertletTable = config.getConvertletTable
val defaultSchema: SchemaPlus = config.getDefaultSchema
var validator: FlinkCalciteSqlValidator = _
var validatedSqlNode: SqlNode = _
var root: RelRoot = _
private def ready() {
if (this.traitDefs != null) {
planner.clearRelTraitDefs()
for (traitDef <- this.traitDefs) {
planner.addRelTraitDef(traitDef)
}
}
}
def parse(sql: String): SqlNode = {
try {
ready()
val parser: SqlParser = SqlParser.create(sql, parserConfig)
val sqlNode: SqlNode = parser.parseStmt
sqlNode
} catch {
case e: CSqlParseException =>
throw SqlParserException(s"SQL parse failed. ${e.getMessage}", e)
}
}
def validate(sqlNode: SqlNode): SqlNode = {
validator = new FlinkCalciteSqlValidator(operatorTable, createCatalogReader, typeFactory)
validator.setIdentifierExpansion(true)
try {
validatedSqlNode = validator.validate(sqlNode)
}
catch {
case e: RuntimeException =>
throw new ValidationException(s"SQL validation failed. ${e.getMessage}", e)
}
validatedSqlNode
}
def rel(sql: SqlNode): RelRoot = {
try {
assert(validatedSqlNode != null)
val rexBuilder: RexBuilder = createRexBuilder
val cluster: RelOptCluster = FlinkRelOptClusterFactory.create(planner, rexBuilder)
val config = SqlToRelConverter.configBuilder()
.withTrimUnusedFields(false).withConvertTableAccess(false).build()
val sqlToRelConverter: SqlToRelConverter = new SqlToRelConverter(
new ViewExpanderImpl, validator, createCatalogReader, cluster, convertletTable, config)
root = sqlToRelConverter.convertQuery(validatedSqlNode, false, true)
// we disable automatic flattening in order to let composite types pass without modification
// we might enable it again once Calcite has better support for structured types
// root = root.withRel(sqlToRelConverter.flattenTypes(root.rel, true))
root = root.withRel(RelDecorrelator.decorrelateQuery(root.rel))
root
} catch {
case e: RelConversionException => throw TableException(e.getMessage)
}
}
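  // A minimal sketch of the intended call order, assuming a `flinkPlanner` instance and an
  // arbitrary query string (rel() consumes the SqlNode validated by the preceding validate() call):
  //
  //   val parsed    = flinkPlanner.parse("SELECT a, COUNT(*) FROM t GROUP BY a")
  //   val validated = flinkPlanner.validate(parsed)
  //   val relRoot   = flinkPlanner.rel(validated)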
/** Implements [[org.apache.calcite.plan.RelOptTable.ViewExpander]]
* interface for [[org.apache.calcite.tools.Planner]]. */
class ViewExpanderImpl extends ViewExpander {
override def expandView(
rowType: RelDataType,
queryString: String,
schemaPath: util.List[String],
viewPath: util.List[String]): RelRoot = {
val parser: SqlParser = SqlParser.create(queryString, parserConfig)
var sqlNode: SqlNode = null
try {
sqlNode = parser.parseQuery
}
catch {
case e: CSqlParseException =>
throw SqlParserException(s"SQL parse failed. ${e.getMessage}", e)
}
val catalogReader: CalciteCatalogReader = createCatalogReader.withSchemaPath(schemaPath)
val validator: SqlValidator =
new FlinkCalciteSqlValidator(operatorTable, catalogReader, typeFactory)
validator.setIdentifierExpansion(true)
val validatedSqlNode: SqlNode = validator.validate(sqlNode)
val rexBuilder: RexBuilder = createRexBuilder
val cluster: RelOptCluster = FlinkRelOptClusterFactory.create(planner, rexBuilder)
val config: SqlToRelConverter.Config = SqlToRelConverter.configBuilder
.withTrimUnusedFields(false).withConvertTableAccess(false).build
val sqlToRelConverter: SqlToRelConverter = new SqlToRelConverter(
new ViewExpanderImpl, validator, catalogReader, cluster, convertletTable, config)
root = sqlToRelConverter.convertQuery(validatedSqlNode, true, false)
root = root.withRel(sqlToRelConverter.flattenTypes(root.rel, true))
root = root.withRel(RelDecorrelator.decorrelateQuery(root.rel))
FlinkPlannerImpl.this.root
}
}
private def createCatalogReader: CalciteCatalogReader = {
val rootSchema: SchemaPlus = FlinkPlannerImpl.rootSchema(defaultSchema)
new CalciteCatalogReader(
CalciteSchema.from(rootSchema),
parserConfig.caseSensitive,
CalciteSchema.from(defaultSchema).path(null),
typeFactory)
}
private def createRexBuilder: RexBuilder = {
new RexBuilder(typeFactory)
}
}
object FlinkPlannerImpl {
private def rootSchema(schema: SchemaPlus): SchemaPlus = {
if (schema.getParentSchema == null) {
schema
}
else {
rootSchema(schema.getParentSchema)
}
}
}
| DieBauer/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkPlannerImpl.scala | Scala | apache-2.0 | 7,021 |
package com.github.lavenderx.springbootscala.data.domain
import java.time.LocalDateTime
import scala.beans.BeanProperty
case class Message(name: String,
age: Int,
user: User,
status: Option[Boolean]) {
@BeanProperty
val createdTime: LocalDateTime = LocalDateTime.now()
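  // A minimal usage sketch, assuming some `someUser: User` value; the field values are made up:
  //
  //   val msg = Message("greeting", 30, someUser, status = Some(true))
  //   msg.getCreatedTime  // Java-style accessor generated by @BeanProperty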
} | lavenderx/springboot-scala-vue | server/src/main/scala/com/github/lavenderx/springbootscala/data/domain/Message.scala | Scala | mit | 334 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools
import java.io._
import java.util.zip.Deflater
import com.beust.jcommander.{ParameterException, Parameters}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.IOUtils
import org.geotools.data.Query
import org.geotools.factory.Hints
import org.locationtech.geomesa.convert.{EvaluationContext, SimpleFeatureConverter, SimpleFeatureConverters}
import org.locationtech.geomesa.index.geoserver.ViewParams
import org.locationtech.geomesa.tools.ConvertParameters.ConvertParameters
import org.locationtech.geomesa.tools.export._
import org.locationtech.geomesa.tools.export.formats._
import org.locationtech.geomesa.tools.utils.CLArgResolver
import org.locationtech.geomesa.tools.utils.DataFormats._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.PathUtils
import org.locationtech.geomesa.utils.io.fs.FileSystemDelegate.FileHandle
import org.locationtech.geomesa.utils.io.fs.LocalDelegate.StdInHandle
import org.locationtech.geomesa.utils.stats.MethodProfiling
import org.locationtech.geomesa.utils.text.TextTools.getPlural
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
class ConvertCommand extends Command with MethodProfiling with LazyLogging {
override val name = "convert"
override val params = new ConvertParameters
override def execute(): Unit = {
profile(convertAndExport()) { (count, time) =>
Command.user.info(s"Conversion complete to ${Option(params.file).map(_.getPath).getOrElse("standard out")} " +
s"in ${time}ms${count.map(c => s" for $c features").getOrElse("")}")
}
}
private def convertAndExport(): Option[Long] = {
import ConvertCommand.{getConverter, getExporter}
import scala.collection.JavaConversions._
val files = if (params.files.nonEmpty) { params.files.iterator.flatMap(PathUtils.interpretPath) } else {
StdInHandle.available().map(Iterator.single).getOrElse {
throw new ParameterException("Missing option: <files>... is required")
}
}
val sft = CLArgResolver.getSft(params.spec)
Command.user.info(s"Using SFT definition: ${SimpleFeatureTypes.encodeType(sft)}")
val converter = getConverter(params, sft)
val filter = Option(params.cqlFilter)
filter.foreach(f => Command.user.debug(s"Applying CQL filter $f"))
val ec = converter.createEvaluationContext(Map("inputFilePath" -> ""))
val maxFeatures = Option(params.maxFeatures).map(_.intValue())
def features() = ConvertCommand.convertFeatures(files, converter, ec, filter, maxFeatures)
val exporter = getExporter(params, features())
try {
exporter.start(sft)
val count = exporter.export(features())
val records = ec.counter.getLineCount - (if (params.noHeader) { 0 } else { params.files.size })
Command.user.info(s"Converted ${getPlural(records, "line")} "
+ s"with ${getPlural(ec.counter.getSuccess, "success", "successes")} "
+ s"and ${getPlural(ec.counter.getFailure, "failure")}")
count
} finally {
IOUtils.closeQuietly(exporter)
IOUtils.closeQuietly(converter)
}
}
}
object ConvertCommand extends LazyLogging {
def getConverter(params: ConvertParameters, sft: SimpleFeatureType): SimpleFeatureConverter[Any] = {
val converterConfig = {
if (params.config != null)
CLArgResolver.getConfig(params.config)
else throw new ParameterException("Unable to parse Simple Feature type from sft config or string")
}
SimpleFeatureConverters.build(sft, converterConfig)
}
def getExporter(params: ConvertParameters, features: => Iterator[SimpleFeature]): FeatureExporter = {
import org.locationtech.geomesa.index.conf.QueryHints.RichHints
lazy val outputStream: OutputStream = ExportCommand.createOutputStream(params.file, params.gzip)
lazy val writer: Writer = ExportCommand.getWriter(params)
lazy val avroCompression = Option(params.gzip).map(_.toInt).getOrElse(Deflater.DEFAULT_COMPRESSION)
lazy val hints = {
val q = new Query("")
Option(params.hints).foreach { hints =>
q.getHints.put(Hints.VIRTUAL_TABLE_PARAMETERS, hints)
ViewParams.setHints(q)
}
q.getHints
}
lazy val arrowDictionaries: Map[String, Array[AnyRef]] = {
val attributes = hints.getArrowDictionaryFields
if (attributes.isEmpty) { Map.empty } else {
val values = attributes.map(a => a -> scala.collection.mutable.HashSet.empty[AnyRef])
features.foreach(f => values.foreach { case (a, v) => v.add(f.getAttribute(a))})
values.map { case (attribute, value) => attribute -> value.toArray }.toMap
}
}
params.outputFormat match {
case Csv | Tsv => new DelimitedExporter(writer, params.outputFormat, None, !params.noHeader)
case Shp => new ShapefileExporter(ExportCommand.checkShpFile(params))
case GeoJson | Json => new GeoJsonExporter(writer)
case Gml => new GmlExporter(outputStream)
case Avro => new AvroExporter(outputStream, avroCompression)
case Bin => new BinExporter(hints, outputStream)
case Arrow => new ArrowExporter(hints, outputStream, arrowDictionaries)
case Leaflet => new LeafletMapExporter(params)
case _ => throw new ParameterException(s"Format ${params.outputFormat} is not supported.")
}
}
def convertFeatures(files: Iterator[FileHandle],
converter: SimpleFeatureConverter[Any],
ec: EvaluationContext,
filter: Option[Filter],
maxFeatures: Option[Int]): Iterator[SimpleFeature] = {
val all = files.flatMap { file =>
ec.set(ec.indexOf("inputFilePath"), file.path)
val is = PathUtils.handleCompression(file.open, file.path)
converter.process(is, ec)
}
val filtered = filter.map(f => all.filter(f.evaluate)).getOrElse(all)
val limited = maxFeatures.map(filtered.take).getOrElse(filtered)
limited
}
}
object ConvertParameters {
@Parameters(commandDescription = "Convert files using GeoMesa's internal converter framework")
class ConvertParameters extends FileExportParams with InputFilesParam with OptionalTypeNameParam
with RequiredFeatureSpecParam with RequiredConverterConfigParam
}
| jahhulbert-ccri/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/ConvertCommand.scala | Scala | apache-2.0 | 6,897 |
package org.unisonweb.util
import org.unisonweb.EasyTest._
import org.unisonweb._
import org.unisonweb.util.Unboxed.F1.{D_B, L_B, L_L, L_P}
import org.unisonweb.util.Unboxed.F2.{DD_D, LD_L, LL_L}
import org.unisonweb.util.Unboxed.Unboxed
object StreamTests {
val tests = suite("stream")(
suite("native")(
test("take/drop") { implicit T =>
equal(
Stream.from(0).take(5).drop(3).sumIntegers,
(0 until 5).drop(3).sum
)
},
test("ex1") { implicit T =>
equal(
Stream.from(0).take(10000).sumIntegers,
(0 until 10000).sum)
},
test("map") { implicit T =>
equal(
Stream.from(0).take(10000).map(L_L(_ + 1)).toSequence.toList,
(0 until 10000).map(_ + 1).toList
)
},
test("flatMap 0") { implicit T =>
equal(
Stream.from(1).take(100)
.flatMap(L_P(n => Stream.constant(n).take(n))).toSequence.toList,
scala.Stream.from(1).take(100)
.flatMap(n => scala.Stream.continually(n).take(n)).toList
)
},
test("flatMap 1") { implicit T =>
equal(
Stream.from(1).take(100)
.flatMap(L_P(n => Stream.constant(n).take(n))).toSequence.toList,
scala.Stream.from(1).take(100)
.flatMap(n => scala.Stream.continually(n).take(n)).toList
)
},
test("flatMap inf-fin-take") { implicit T =>
equal(
Stream.from(0).flatMap(L_P[Stream[Unboxed[Long]]](n => Stream.singleton(n))).take(3).toSequence.toList,
scala.Stream.from(0).flatMap(n => scala.Stream(n)).take(3).map(_.toLong).toList
)
},
test("flatMap inf-inf-take") { implicit T =>
equal(
Stream.from(0).flatMap(L_P[Stream[Unboxed[Long]]](n => Stream.constant(n))).take(3).toSequence.toList,
scala.Stream.from(0).flatMap(n => scala.Stream.continually(n)).take(3).map(_.toLong).toList
)
},
test("flatMap inf-consinf-take") { implicit T =>
equal(
Stream.from(0).flatMap(L_P[Stream[Unboxed[Long]]](n => 7l :: Stream.constant(n))).take(5).toSequence.toList,
scala.Stream.from(0).flatMap(n => 7 #:: scala.Stream.continually(n)).take(5).map(_.toLong).toList
)
},
test("unfold") { implicit T =>
equal(
// Stream.take 5 (Stream.unfold (b -> if b < 1 then Some (b + 1, b / 2) else None) -2)
Stream.unfold[Option[(Long,Long)],(Long,Long),Unboxed[Long],Unboxed[Long],Long](-2)(
L_P(b => if (b < 1) Some((b + 1l, b / 2l)) else None)
).take(5).toSequence.toList,
List(-2/2, -1/2, 0/2)
)
},
test("filter") { implicit T =>
equal(
Stream.from(0).take(10000).filter(L_B(_ % 2 == 0)).toSequence.toList,
(0 until 10000).filter(_ % 2 == 0).toList
)
},
test("takeWhile") { implicit T =>
equal(
Stream.from(0).take(100).takeWhile(L_B(_ < 50)).toSequence.toList,
(0 until 100).takeWhile(_ < 50).toList
)
equal(
Stream.from(0.0, by = 1.0).take(100).takeWhile(D_B(_ < 50.0)).toSequence.toList,
(0 until 100).takeWhile(_ < 50).toList
)
},
test("dropWhile") { implicit T =>
equal(
Stream.from(0).take(100).dropWhile(L_B(_ < 50)).toSequence.toList,
(0 until 100).dropWhile(_ < 50).toList
)
},
test("zipWith") { implicit T =>
val s1 = Stream.from(0)
val s2 = scala.collection.immutable.Stream.from(0)
equal(
s1.zipWith(s1.drop(1))(LL_L(_ * _)).take(100).toSequence.toList,
s2.zip(s2.drop(1)).map { case (a,b) => a * b }.take(100).toList
)
},
test("toSequence0") { implicit T =>
equal(
Stream.from(0).take(10000).toSequence0 { (u, _) => u },
Sequence.apply(0 until 10000: _*)
)
},
test("toSequence") { implicit T =>
equal(
Stream.from(0).take(10000).toSequence,
Sequence.apply(0 until 10000: _*)
)
},
test("foldLeft0 (+) long") { implicit T =>
equal(
Stream.from(0).take(10000).foldLeft0(U0, null:Unboxed[Long])(
LL_L(_ + _))((u,_) => u),
(0 until 10000).sum
)
},
test("foldLeft (+) long") { implicit T =>
equal(
Stream.from(0).take(10000).foldLeft(0l)(LL_L(_ + _)),
(0 until 10000).sum
)
},
test("foldLeft count even doubles") { implicit T =>
equal(
Stream.from(0.0, by = 1.0).take(10000).foldLeft(0l)(
          LD_L((z, d) => if (d.toInt % 2 == 0) z + 1 else z)),
(0 until 10000).count(_ % 2 == 0)
)
},
test("foldLeft (+) double") { implicit T =>
equal(
Stream.from(0.0, by = 1.0).take(10000).foldLeft(0.0)(DD_D(_ + _)),
(0 until 10000).sum
)
},
test("scanLeft0 (+) long") { implicit T =>
equal(
Stream.from(7).take(10).scanLeft0(longToUnboxed(-3), null: Unboxed[Long])(LL_L(_+_)).sumIntegers,
scala.Stream.from(7).take(10).scanLeft(-3)(_+_).sum
)
},
test("scanLeft (+) long") { implicit T =>
equal(
Stream.from(7).take(10).scanLeft(-3l)(LL_L(_+_)).sumIntegers,
scala.Stream.from(7).take(10).scanLeft(-3)(_+_).sum
)
},
test("++") { implicit T =>
equal(
(Stream.from(0).take(10000) ++ Stream.from(20000).take(5))
.toSequence.toList,
(scala.Stream.from(0).take(10000) ++ scala.Stream.from(20000).take(5))
.toList
)
equal(
(Stream.from(0).drop(10000).take(10000) ++ Stream.from(20000).take(5))
.toSequence.toList,
(scala.Stream.from(0).drop(10000).take(10000) ++
scala.Stream.from(20000).take(5)).toList
)
},
test("cons") { implicit T =>
equal(
(-10l :: Stream.from(0).take(10)).toSequence.toList,
(-10 #:: scala.Stream.from(0).take(10)).toList
)
},
test("iterate-from0") { implicit T =>
equal(
Stream.iterate(0l)(L_L(_ + 1)).take(10).toSequence.toList,
(scala.Stream.from(0).take(10)).toList
)
}
)
)
}
| paulp/unison | runtime-jvm/main/src/test/scala/util/StreamTests.scala | Scala | mit | 6,371 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package scalapb.perf.protos
@SerialVersionUID(0L)
final case class SimpleMessage(
i: _root_.scala.Int = 0,
j: _root_.scala.Int = 0,
k: _root_.com.google.protobuf.ByteString = _root_.com.google.protobuf.ByteString.EMPTY,
color: scalapb.perf.protos.Color = scalapb.perf.protos.Color.UNKNOWN,
unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
) extends scalapb.GeneratedMessage
with scalapb.lenses.Updatable[SimpleMessage] {
@transient
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
{
val __value = i
if (__value != 0) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeInt32Size(1, __value)
}
};
{
val __value = j
if (__value != 0) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeInt32Size(2, __value)
}
};
{
val __value = k
if (!__value.isEmpty) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeBytesSize(3, __value)
}
};
{
val __value = color.value
if (__value != 0) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeEnumSize(4, __value)
}
};
__size += unknownFields.serializedSize
__size
}
override def serializedSize: _root_.scala.Int = {
var __size = __serializedSizeMemoized
if (__size == 0) {
__size = __computeSerializedSize() + 1
__serializedSizeMemoized = __size
}
__size - 1
}
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
{
val __v = i
if (__v != 0) {
_output__.writeInt32(1, __v)
}
};
{
val __v = j
if (__v != 0) {
_output__.writeInt32(2, __v)
}
};
{
val __v = k
if (!__v.isEmpty) {
_output__.writeBytes(3, __v)
}
};
{
val __v = color.value
if (__v != 0) {
_output__.writeEnum(4, __v)
}
};
unknownFields.writeTo(_output__)
}
def withI(__v: _root_.scala.Int): SimpleMessage = copy(i = __v)
def withJ(__v: _root_.scala.Int): SimpleMessage = copy(j = __v)
def withK(__v: _root_.com.google.protobuf.ByteString): SimpleMessage = copy(k = __v)
def withColor(__v: scalapb.perf.protos.Color): SimpleMessage = copy(color = __v)
def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @ _root_.scala.unchecked) match {
case 1 => {
val __t = i
if (__t != 0) __t else null
}
case 2 => {
val __t = j
if (__t != 0) __t else null
}
case 3 => {
val __t = k
if (__t != _root_.com.google.protobuf.ByteString.EMPTY) __t else null
}
case 4 => {
val __t = color.javaValueDescriptor
if (__t.getNumber() != 0) __t else null
}
}
}
def getField(
__field: _root_.scalapb.descriptors.FieldDescriptor
): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @ _root_.scala.unchecked) match {
case 1 => _root_.scalapb.descriptors.PInt(i)
case 2 => _root_.scalapb.descriptors.PInt(j)
case 3 => _root_.scalapb.descriptors.PByteString(k)
case 4 => _root_.scalapb.descriptors.PEnum(color.scalaValueDescriptor)
}
}
def toProtoString: _root_.scala.Predef.String =
_root_.scalapb.TextFormat.printToUnicodeString(this)
def companion: scalapb.perf.protos.SimpleMessage.type = scalapb.perf.protos.SimpleMessage
// @@protoc_insertion_point(GeneratedMessage[scalapb.perf.SimpleMessage])
}
object SimpleMessage extends scalapb.GeneratedMessageCompanion[scalapb.perf.protos.SimpleMessage] {
implicit def messageCompanion
: scalapb.GeneratedMessageCompanion[scalapb.perf.protos.SimpleMessage] = this
def parseFrom(
`_input__`: _root_.com.google.protobuf.CodedInputStream
): scalapb.perf.protos.SimpleMessage = {
var __i: _root_.scala.Int = 0
var __j: _root_.scala.Int = 0
var __k: _root_.com.google.protobuf.ByteString = _root_.com.google.protobuf.ByteString.EMPTY
var __color: scalapb.perf.protos.Color = scalapb.perf.protos.Color.UNKNOWN
var `_unknownFields__` : _root_.scalapb.UnknownFieldSet.Builder = null
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
case 0 => _done__ = true
case 8 =>
__i = _input__.readInt32()
case 16 =>
__j = _input__.readInt32()
case 26 =>
__k = _input__.readBytes()
case 32 =>
__color = scalapb.perf.protos.Color.fromValue(_input__.readEnum())
case tag =>
if (_unknownFields__ == null) {
_unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
}
_unknownFields__.parseField(tag, _input__)
}
}
scalapb.perf.protos.SimpleMessage(
i = __i,
j = __j,
k = __k,
color = __color,
unknownFields =
if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty
else _unknownFields__.result()
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[scalapb.perf.protos.SimpleMessage] =
_root_.scalapb.descriptors.Reads {
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(
__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor),
"FieldDescriptor does not match message type."
)
scalapb.perf.protos.SimpleMessage(
i = __fieldsMap
.get(scalaDescriptor.findFieldByNumber(1).get)
.map(_.as[_root_.scala.Int])
.getOrElse(0),
j = __fieldsMap
.get(scalaDescriptor.findFieldByNumber(2).get)
.map(_.as[_root_.scala.Int])
.getOrElse(0),
k = __fieldsMap
.get(scalaDescriptor.findFieldByNumber(3).get)
.map(_.as[_root_.com.google.protobuf.ByteString])
.getOrElse(_root_.com.google.protobuf.ByteString.EMPTY),
color = scalapb.perf.protos.Color.fromValue(
__fieldsMap
.get(scalaDescriptor.findFieldByNumber(4).get)
.map(_.as[_root_.scalapb.descriptors.EnumValueDescriptor])
.getOrElse(scalapb.perf.protos.Color.UNKNOWN.scalaValueDescriptor)
.number
)
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor =
ProtosProto.javaDescriptor.getMessageTypes().get(0)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor =
ProtosProto.scalaDescriptor.messages(0)
def messageCompanionForFieldNumber(
__number: _root_.scala.Int
): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
lazy val nestedMessagesCompanions
: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
Seq.empty
def enumCompanionForFieldNumber(
__fieldNumber: _root_.scala.Int
): _root_.scalapb.GeneratedEnumCompanion[_] = {
(__fieldNumber: @ _root_.scala.unchecked) match {
case 4 => scalapb.perf.protos.Color
}
}
lazy val defaultInstance = scalapb.perf.protos.SimpleMessage(
i = 0,
j = 0,
k = _root_.com.google.protobuf.ByteString.EMPTY,
color = scalapb.perf.protos.Color.UNKNOWN
)
implicit class SimpleMessageLens[UpperPB](
_l: _root_.scalapb.lenses.Lens[UpperPB, scalapb.perf.protos.SimpleMessage]
) extends _root_.scalapb.lenses.ObjectLens[UpperPB, scalapb.perf.protos.SimpleMessage](_l) {
def i: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] =
field(_.i)((c_, f_) => c_.copy(i = f_))
def j: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] =
field(_.j)((c_, f_) => c_.copy(j = f_))
def k: _root_.scalapb.lenses.Lens[UpperPB, _root_.com.google.protobuf.ByteString] =
field(_.k)((c_, f_) => c_.copy(k = f_))
def color: _root_.scalapb.lenses.Lens[UpperPB, scalapb.perf.protos.Color] =
field(_.color)((c_, f_) => c_.copy(color = f_))
}
final val I_FIELD_NUMBER = 1
final val J_FIELD_NUMBER = 2
final val K_FIELD_NUMBER = 3
final val COLOR_FIELD_NUMBER = 4
def of(
i: _root_.scala.Int,
j: _root_.scala.Int,
k: _root_.com.google.protobuf.ByteString,
color: scalapb.perf.protos.Color
): _root_.scalapb.perf.protos.SimpleMessage = _root_.scalapb.perf.protos.SimpleMessage(
i,
j,
k,
color
)
// @@protoc_insertion_point(GeneratedMessageCompanion[scalapb.perf.SimpleMessage])
}
| scalapb/ScalaPB | docs/src/main/scala/generated/scalapb/perf/protos/SimpleMessage.scala | Scala | apache-2.0 | 9,283 |
package scalarules.test
object ClassProvider {
def dissappearingClassMethod: String = "testContents"
}
object BackgroundNoise{}
| bazelbuild/rules_scala | test_expect_failure/disappearing_class/ClassProvider.scala | Scala | apache-2.0 | 133 |
package org.denigma.gsea.views
import org.denigma.binding.views.BindableView
import org.denigma.binding.views.collections.CollectionView
import org.denigma.codemirror.Doc
import org.denigma.controls.binders.CodeBinder
import org.scalajs.dom.raw.{HTMLElement, HTMLTextAreaElement}
import rx.ops._
import rx.{Rx, Var}
import scala.collection.immutable.Map
import scala.scalajs.js
class CodeView(val elem:HTMLElement,val params:Map[String,Any]) extends CollectionView
{
override type Item = Var[CodeCell]
override type ItemView = CodeCellView
override def newItem(item: Item): CodeCellView = this.constructItemView(item,Map[String,Any]("cell"->item)){case (el,mp)=>
new CodeCellView(el,mp)
}
override val items: Var[List[Item]] = Var(List(Var(CodeCell.empty)))
}
class CodeCellView(val elem:HTMLElement,val params:Map[String,Any]) extends BindableView
{
lazy val cell: Var[CodeCell] = this.resolveKey("cell"){
case cellVar:Var[CodeCell]=>cellVar
case code:CodeCell=>Var(code)
}
lazy val code: Rx[String] = cell.map(c=>c.code)
lazy val result: Rx[String] = cell.map(c=>c.result)
lazy val hasResult = result.map(r=>r!="")
}
class CodeCellBinder(view:BindableView,onCtrlEnter:Doc=>Unit) extends CodeBinder(view) {
lazy val ctrlHandler: js.Function1[Doc, Unit] = onCtrlEnter
//lazy val delHandler:js.Function1[Doc,Unit] = onDel
override def makeEditor(area: HTMLTextAreaElement, textValue: String, codeMode: String, readOnly: Boolean = false) = {
val editor = super.makeEditor(area, textValue, codeMode, readOnly)
val dic = js.Dictionary(
"Ctrl-Enter" -> ctrlHandler
)
editor.setOption("extraKeys", dic)
editor
}
}
| antonkulaga/gsea-runner | app/js/src/main/scala/org/denigma/gsea/views/CodeView.scala | Scala | mpl-2.0 | 1,691 |
package typeclass.data
import typeclass.Semigroup
import scalaprops.Gen
case class Last[A](value: Option[A])
object Last {
implicit def gen[A: Gen]: Gen[Last[A]] = Gen[Option[A]].map(Last(_))
implicit def semigroup[A]: Semigroup[Last[A]] = ???
}
| julien-truffaut/Typeclass | exercise/src/main/scala/typeclass/data/Last.scala | Scala | mit | 255 |
package com.rasterfoundry.datamodel
import io.circe.syntax._
import org.scalatest._
import java.util.UUID
class UploadTestSuite extends FunSuite with Matchers {
test(
"non-platform admins should not be able to create uploads for other users") {
val uploadCreate = Upload.Create(
UploadStatus.Uploaded,
FileType.Geotiff,
UploadType.Dropbox,
List.empty,
UUID.randomUUID,
().asJson,
Some("foo"), // proposed owner
Visibility.Private,
None,
None,
None,
None
)
// note the id is not "foo"
val user = User.Create("bar").toUser
val platformId = UUID.randomUUID
an[IllegalArgumentException] should be thrownBy (
uploadCreate.toUpload(user,
(platformId, false),
Some(platformId))
)
}
test(
"platform admins should be able to create uploads for other users in the same platform") {
val uploadCreate = Upload.Create(
UploadStatus.Uploaded,
FileType.Geotiff,
UploadType.Dropbox,
List.empty,
UUID.randomUUID,
().asJson,
Some("foo"), // proposed owner
Visibility.Private,
None,
None,
None,
None
)
// note the id is not "foo"
val user = User.Create("bar").toUser
val platformId = UUID.randomUUID
Some(
uploadCreate
.toUpload(user, (platformId, true), Some(platformId))
.owner) shouldEqual uploadCreate.owner
}
test(
"platform admins should not be able to create uploads for other users in different platforms") {
val uploadCreate = Upload.Create(
UploadStatus.Uploaded,
FileType.Geotiff,
UploadType.Dropbox,
List.empty,
UUID.randomUUID,
().asJson,
Some("foo"), // proposed owner
Visibility.Private,
None,
None,
None,
None
)
// note the id is not "foo"
val user = User.Create("bar").toUser
an[IllegalArgumentException] should be thrownBy (
uploadCreate.toUpload(user,
(UUID.randomUUID, false),
Some(UUID.randomUUID))
)
}
test(
"superusers should also be able to create uploads for other users regardless of platform") {
val uploadCreate = Upload.Create(
UploadStatus.Uploaded,
FileType.Geotiff,
UploadType.Dropbox,
List.empty,
UUID.randomUUID,
().asJson,
Some("foo"), // proposed owner
Visibility.Private,
None,
None,
None,
None
)
// note the id is not "foo"
val user = User
.Create("bar")
.toUser
.copy(
isSuperuser = true
)
Some(
uploadCreate
.toUpload(user, (UUID.randomUUID, true), Some(UUID.randomUUID))
.owner) shouldEqual uploadCreate.owner
}
}
| aaronxsu/raster-foundry | app-backend/common/src/test/scala/com/UploadSpec.scala | Scala | apache-2.0 | 2,859 |
package com.karasiq.nanoboard.api
case class NanoboardCaptchaImage(index: Int, image: Array[Byte])
case class NanoboardCaptchaRequest(postHash: String, pow: Array[Byte], captcha: NanoboardCaptchaImage)
case class NanoboardCaptchaAnswer(request: NanoboardCaptchaRequest, answer: String)
| Karasiq/nanoboard | shared/shared/src/main/scala/com/karasiq/nanoboard/api/NanoboardCaptchaRequest.scala | Scala | apache-2.0 | 288
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.io.{File, IOException}
import java.lang.{Integer => JInt}
import java.net.InetSocketAddress
import java.util.{Map => JMap, Properties}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.language.postfixOps
import scala.util.Random
import kafka.admin.AdminUtils
import kafka.api.Request
import kafka.common.TopicAndPartition
import kafka.server.{KafkaConfig, KafkaServer, OffsetCheckpoint}
import kafka.utils.ZkUtils
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* This is a helper class for Kafka test suites. This has the functionality to set up
* and tear down local Kafka servers, and to push data using Kafka producers.
*
 * The reason to put the Kafka test utility class in src is to allow testing Python-related Kafka APIs.
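 *
 * A minimal usage sketch (illustrative only; the topic name and messages below are
 * invented for the example, not taken from any real test):
 * {{{
 *   val testUtils = new KafkaTestUtils
 *   testUtils.setup()
 *   try {
 *     testUtils.createTopic("example-topic", partitions = 2)
 *     testUtils.sendMessages("example-topic", Array("a", "b", "c"))
 *   } finally {
 *     testUtils.teardown()
 *   }
 * }}}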
*/
class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends Logging {
// Zookeeper related configurations
private val zkHost = "localhost"
private var zkPort: Int = 0
private val zkConnectionTimeout = 60000
private val zkSessionTimeout = 6000
private var zookeeper: EmbeddedZookeeper = _
private var zkUtils: ZkUtils = _
// Kafka broker related configurations
private val brokerHost = "localhost"
private var brokerPort = 0
private var brokerConf: KafkaConfig = _
// Kafka broker server
private var server: KafkaServer = _
// Kafka producer
private var producer: Producer[String, String] = _
// Flag to test whether the system is correctly started
private var zkReady = false
private var brokerReady = false
def zkAddress: String = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper address")
s"$zkHost:$zkPort"
}
def brokerAddress: String = {
assert(brokerReady, "Kafka not setup yet or already torn down, cannot get broker address")
s"$brokerHost:$brokerPort"
}
def zookeeperClient: ZkUtils = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper client")
Option(zkUtils).getOrElse(
throw new IllegalStateException("Zookeeper client is not yet initialized"))
}
// Set up the Embedded Zookeeper server and get the proper Zookeeper port
private def setupEmbeddedZookeeper(): Unit = {
// Zookeeper server startup
zookeeper = new EmbeddedZookeeper(s"$zkHost:$zkPort")
// Get the actual zookeeper binding port
zkPort = zookeeper.actualPort
zkUtils = ZkUtils(s"$zkHost:$zkPort", zkSessionTimeout, zkConnectionTimeout, false)
zkReady = true
}
// Set up the Embedded Kafka server
private def setupEmbeddedKafkaServer(): Unit = {
assert(zkReady, "Zookeeper should be set up beforehand")
// Kafka broker startup
Utils.startServiceOnPort(brokerPort, port => {
brokerPort = port
brokerConf = new KafkaConfig(brokerConfiguration, doLog = false)
server = new KafkaServer(brokerConf)
server.startup()
brokerPort = server.boundPort()
(server, brokerPort)
}, new SparkConf(), "KafkaBroker")
brokerReady = true
}
/** setup the whole embedded servers, including Zookeeper and Kafka brokers */
def setup(): Unit = {
setupEmbeddedZookeeper()
setupEmbeddedKafkaServer()
}
/** Teardown the whole servers, including Kafka broker and Zookeeper */
def teardown(): Unit = {
brokerReady = false
zkReady = false
if (producer != null) {
producer.close()
producer = null
}
if (server != null) {
server.shutdown()
server.awaitShutdown()
server = null
}
// On Windows, `logDirs` is left open even after Kafka server above is completely shut down
// in some cases. It leads to test failures on Windows if the directory deletion failure
// throws an exception.
brokerConf.logDirs.foreach { f =>
try {
Utils.deleteRecursively(new File(f))
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
}
if (zkUtils != null) {
zkUtils.close()
zkUtils = null
}
if (zookeeper != null) {
zookeeper.shutdown()
zookeeper = null
}
}
/** Create a Kafka topic and wait until it is propagated to the whole cluster */
def createTopic(topic: String, partitions: Int, overwrite: Boolean = false): Unit = {
var created = false
while (!created) {
try {
AdminUtils.createTopic(zkUtils, topic, partitions, 1)
created = true
} catch {
        // Workaround for the fact that TopicExistsException is in kafka.common in 0.10.0 and
// org.apache.kafka.common.errors in 0.10.1 (!)
case e: Exception if (e.getClass.getSimpleName == "TopicExistsException") && overwrite =>
deleteTopic(topic)
}
}
// wait until metadata is propagated
(0 until partitions).foreach { p =>
waitUntilMetadataIsPropagated(topic, p)
}
}
def getAllTopicsAndPartitionSize(): Seq[(String, Int)] = {
zkUtils.getPartitionsForTopics(zkUtils.getAllTopics()).mapValues(_.size).toSeq
}
/** Create a Kafka topic and wait until it is propagated to the whole cluster */
def createTopic(topic: String): Unit = {
createTopic(topic, 1)
}
/** Delete a Kafka topic and wait until it is propagated to the whole cluster */
def deleteTopic(topic: String): Unit = {
val partitions = zkUtils.getPartitionsForTopics(Seq(topic))(topic).size
AdminUtils.deleteTopic(zkUtils, topic)
verifyTopicDeletionWithRetries(zkUtils, topic, partitions, List(this.server))
}
/** Add new partitions to a Kafka topic */
def addPartitions(topic: String, partitions: Int): Unit = {
AdminUtils.addPartitions(zkUtils, topic, partitions)
// wait until metadata is propagated
(0 until partitions).foreach { p =>
waitUntilMetadataIsPropagated(topic, p)
}
}
/** Java-friendly function for sending messages to the Kafka broker */
def sendMessages(topic: String, messageToFreq: JMap[String, JInt]): Unit = {
sendMessages(topic, Map(messageToFreq.asScala.mapValues(_.intValue()).toSeq: _*))
}
/** Send the messages to the Kafka broker */
def sendMessages(topic: String, messageToFreq: Map[String, Int]): Unit = {
val messages = messageToFreq.flatMap { case (s, freq) => Seq.fill(freq)(s) }.toArray
sendMessages(topic, messages)
}
/** Send the array of messages to the Kafka broker */
def sendMessages(topic: String, messages: Array[String]): Seq[(String, RecordMetadata)] = {
sendMessages(topic, messages, None)
}
/** Send the array of messages to the Kafka broker using specified partition */
def sendMessages(
topic: String,
messages: Array[String],
partition: Option[Int]): Seq[(String, RecordMetadata)] = {
producer = new KafkaProducer[String, String](producerConfiguration)
val offsets = try {
messages.map { m =>
val record = partition match {
case Some(p) => new ProducerRecord[String, String](topic, p, null, m)
case None => new ProducerRecord[String, String](topic, m)
}
val metadata =
producer.send(record).get(10, TimeUnit.SECONDS)
logInfo(s"\\tSent $m to partition ${metadata.partition}, offset ${metadata.offset}")
(m, metadata)
}
} finally {
if (producer != null) {
producer.close()
producer = null
}
}
offsets
}
def cleanupLogs(): Unit = {
server.logManager.cleanupLogs()
}
def getEarliestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
val kc = new KafkaConsumer[String, String](consumerConfiguration)
logInfo("Created consumer to get earliest offsets")
kc.subscribe(topics.asJavaCollection)
kc.poll(0)
val partitions = kc.assignment()
kc.pause(partitions)
kc.seekToBeginning(partitions)
val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
kc.close()
logInfo("Closed consumer to get earliest offsets")
offsets
}
def getLatestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
val kc = new KafkaConsumer[String, String](consumerConfiguration)
logInfo("Created consumer to get latest offsets")
kc.subscribe(topics.asJavaCollection)
kc.poll(0)
val partitions = kc.assignment()
kc.pause(partitions)
kc.seekToEnd(partitions)
val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
kc.close()
logInfo("Closed consumer to get latest offsets")
offsets
}
protected def brokerConfiguration: Properties = {
val props = new Properties()
props.put("broker.id", "0")
props.put("host.name", "localhost")
props.put("advertised.host.name", "localhost")
props.put("port", brokerPort.toString)
props.put("log.dir", Utils.createTempDir().getAbsolutePath)
props.put("zookeeper.connect", zkAddress)
props.put("log.flush.interval.messages", "1")
props.put("replica.socket.timeout.ms", "1500")
props.put("delete.topic.enable", "true")
props.put("offsets.topic.num.partitions", "1")
// Can not use properties.putAll(propsMap.asJava) in scala-2.12
// See https://github.com/scala/bug/issues/10418
withBrokerProps.foreach { case (k, v) => props.put(k, v) }
props
}
private def producerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("value.serializer", classOf[StringSerializer].getName)
props.put("key.serializer", classOf[StringSerializer].getName)
// wait for all in-sync replicas to ack sends
props.put("acks", "all")
props
}
private def consumerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("group.id", "group-KafkaTestUtils-" + Random.nextInt)
props.put("value.deserializer", classOf[StringDeserializer].getName)
props.put("key.deserializer", classOf[StringDeserializer].getName)
props.put("enable.auto.commit", "false")
props
}
/** Verify topic is deleted in all places, e.g, brokers, zookeeper. */
private def verifyTopicDeletion(
topic: String,
numPartitions: Int,
servers: Seq[KafkaServer]): Unit = {
val topicAndPartitions = (0 until numPartitions).map(TopicAndPartition(topic, _))
import ZkUtils._
// wait until admin path for delete topic is deleted, signaling completion of topic deletion
assert(
!zkUtils.pathExists(getDeleteTopicPath(topic)),
s"${getDeleteTopicPath(topic)} still exists")
assert(!zkUtils.pathExists(getTopicPath(topic)), s"${getTopicPath(topic)} still exists")
// ensure that the topic-partition has been deleted from all brokers' replica managers
assert(servers.forall(server => topicAndPartitions.forall(tp =>
server.replicaManager.getPartition(tp.topic, tp.partition) == None)),
s"topic $topic still exists in the replica manager")
// ensure that logs from all replicas are deleted if delete topic is marked successful
assert(servers.forall(server => topicAndPartitions.forall(tp =>
server.getLogManager().getLog(tp).isEmpty)),
s"topic $topic still exists in log mananger")
// ensure that topic is removed from all cleaner offsets
assert(servers.forall(server => topicAndPartitions.forall { tp =>
val checkpoints = server.getLogManager().logDirs.map { logDir =>
new OffsetCheckpoint(new File(logDir, "cleaner-offset-checkpoint")).read()
}
checkpoints.forall(checkpointsPerLogDir => !checkpointsPerLogDir.contains(tp))
}), s"checkpoint for topic $topic still exists")
// ensure the topic is gone
assert(
!zkUtils.getAllTopics().contains(topic),
s"topic $topic still exists on zookeeper")
}
/** Verify topic is deleted. Retry to delete the topic if not. */
private def verifyTopicDeletionWithRetries(
zkUtils: ZkUtils,
topic: String,
numPartitions: Int,
servers: Seq[KafkaServer]) {
eventually(timeout(60.seconds), interval(200.millis)) {
try {
verifyTopicDeletion(topic, numPartitions, servers)
} catch {
case e: Throwable =>
// As pushing messages into Kafka updates Zookeeper asynchronously, there is a small
// chance that a topic will be recreated after deletion due to the asynchronous update.
// Hence, delete the topic and retry.
AdminUtils.deleteTopic(zkUtils, topic)
throw e
}
}
}
private def waitUntilMetadataIsPropagated(topic: String, partition: Int): Unit = {
def isPropagated = server.apis.metadataCache.getPartitionInfo(topic, partition) match {
case Some(partitionState) =>
val leaderAndInSyncReplicas = partitionState.leaderIsrAndControllerEpoch.leaderAndIsr
zkUtils.getLeaderForPartition(topic, partition).isDefined &&
Request.isValidBrokerId(leaderAndInSyncReplicas.leader) &&
leaderAndInSyncReplicas.isr.nonEmpty
case _ =>
false
}
eventually(timeout(60.seconds)) {
assert(isPropagated, s"Partition [$topic, $partition] metadata not propagated after timeout")
}
}
private class EmbeddedZookeeper(val zkConnect: String) {
val snapshotDir = Utils.createTempDir()
val logDir = Utils.createTempDir()
val zookeeper = new ZooKeeperServer(snapshotDir, logDir, 500)
val (ip, port) = {
val splits = zkConnect.split(":")
(splits(0), splits(1).toInt)
}
val factory = new NIOServerCnxnFactory()
factory.configure(new InetSocketAddress(ip, port), 16)
factory.startup(zookeeper)
val actualPort = factory.getLocalPort
def shutdown() {
factory.shutdown()
// The directories are not closed even if the ZooKeeper server is shut down.
// Please see ZOOKEEPER-1844, which is fixed in 3.4.6+. It leads to test failures
// on Windows if the directory deletion failure throws an exception.
try {
Utils.deleteRecursively(snapshotDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
try {
Utils.deleteRecursively(logDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
}
}
}
| esi-mineset/spark | external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala | Scala | apache-2.0 | 15,584 |
/*
* Copyright 2015 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.webservices.generic
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.model.{ HttpHeader, StatusCodes }
import akka.stream.scaladsl.Source
import com.github.dnvriend.TestSpec
class HttpClientTest extends TestSpec {
"HttpClient.encoder" should "url encode text" in {
HttpClient.encode("abcde abcde") shouldBe "abcde+abcde"
}
"HttpClient.queryString" should "create a query string from an empty map" in {
HttpClient.queryString(Map.empty) shouldBe ""
}
it should "create a query string from simple entries" in {
HttpClient.queryString(Map("a" -> "b", "c" -> "d")) shouldBe "?a=b&c=d"
HttpClient.queryString(Map("a" -> "b c", "d" -> "e f")) shouldBe "?a=b+c&d=e+f"
HttpClient.queryString(Map("a" -> "", "c" -> "")) shouldBe "?a=&c="
HttpClient.queryString(Map("" -> "", "" -> "")) shouldBe "?"
}
"HttpClient.header" should "create a single http header" in {
HttpClient.header("foo", "bar").value mustBe {
case HttpHeader("foo", "bar") ⇒
}
}
it should "create a List[HttpHeader] from a Map[String, String]" in {
HttpClient.headers(Map("foo" -> "bar", "bar" -> "baz")).sortBy(_.name()) mustBe {
case List(HttpHeader("bar", "baz"), HttpHeader("foo", "bar")) ⇒
}
}
/**
* see: http://httpbin.org/
*/
"Client connection to httpbin.org (echo service)" should "non TLS HTTP 200 for '/get'" in {
HttpClient("httpbin.org", 80, tls = false).get("/get").futureValue.status shouldBe StatusCodes.OK
}
it should "TLS HTTP 200 for a get on '/get'" in {
HttpClient("httpbin.org", 443, tls = true).get("/get").futureValue.status shouldBe StatusCodes.OK
}
it should "support basic auth for non-tls" in {
HttpClient("httpbin.org", 80, tls = false, Option("foo"), Option("bar")).get("/basic-auth/foo/bar").futureValue.status shouldBe StatusCodes.OK
}
it should "support basic auth for tls" in {
HttpClient("httpbin.org", 443, tls = true, Option("foo"), Option("bar")).get("/basic-auth/foo/bar").futureValue.status shouldBe StatusCodes.OK
}
it should "support post" in {
HttpClient("httpbin.org", 443, tls = true, Option("foo"), Option("bar")).post("/post").futureValue.status shouldBe StatusCodes.OK
}
it should "support put" in {
HttpClient("httpbin.org", 443, tls = true, Option("foo"), Option("bar")).put("/put").futureValue.status shouldBe StatusCodes.OK
}
it should "support delete" in {
HttpClient("httpbin.org", 443, tls = true, Option("foo"), Option("bar")).delete("/delete").futureValue.status shouldBe StatusCodes.OK
}
it should "support patch" in {
HttpClient("httpbin.org", 443, tls = true, Option("foo"), Option("bar")).patch("/patch").futureValue.status shouldBe StatusCodes.OK
}
def httpBinGet = HttpClient.mkRequest(RequestBuilding.Get, "/get")
"Cached connection" should "non tls HTTP 200 for /get" in {
Source((1 to 10).map(i ⇒ (httpBinGet, i)))
.via(HttpClient.cachedConnection("httpbin.org", 80))
.via(HttpClient.responseToString)
.log("received")
.runFold(0) { case (c, e) ⇒ c + 1 }
.futureValue shouldBe 10
}
}
| Tecsisa/akka-http-test | src/test/scala/com/github/dnvriend/webservices/generic/HttpClientTest.scala | Scala | apache-2.0 | 3,768 |
package org.scaladebugger.api.profiles.java.info
import org.scaladebugger.api.lowlevel.events.misc.NoResume
import org.scaladebugger.api.profiles.java.JavaDebugProfile
import org.scaladebugger.api.profiles.traits.info.ThreadInfo
import org.scaladebugger.api.utils.JDITools
import org.scaladebugger.api.virtualmachines.DummyScalaVirtualMachine
import org.scaladebugger.test.helpers.ParallelMockFunSpec
import org.scalatest.concurrent.Eventually
import test.{ApiTestUtilities, VirtualMachineFixtures}
class JavaObjectInfoScala210IntegrationSpec extends ParallelMockFunSpec
with VirtualMachineFixtures
with ApiTestUtilities
with Eventually
{
describe("JavaObjectInfo for 2.10") {
it("should be able to get a list of methods for the object") {
val testClass = "org.scaladebugger.test.info.Methods"
val testFile = JDITools.scalaClassStringToFileString(testClass)
@volatile var t: Option[ThreadInfo] = None
val s = DummyScalaVirtualMachine.newInstance()
// NOTE: Do not resume so we can check the variables at the stack frame
s.withProfile(JavaDebugProfile.Name)
.getOrCreateBreakpointRequest(testFile, 22, NoResume)
.foreach(e => t = Some(e.thread))
withVirtualMachine(testClass, pendingScalaVirtualMachines = Seq(s)) { (s) =>
logTimeTaken(eventually {
val methodNames = t.get.topFrame.thisObject.methods.map(_.name)
methodNames should contain theSameElementsAs Seq(
// Defined methods
"main",
"innerMethod$1", // Nested method has different Java signature
"publicMethod",
"privateMethod",
"protectedMethod",
"zeroArgMethod",
"functionMethod", // Scala provides a method for the function
// object since it would be treated as a field
// Inherited methods
"<clinit>",
"<init>",
"registerNatives",
"getClass",
"hashCode",
"equals",
"clone",
"toString",
"notify",
"notifyAll",
"wait", // Overloaded method
"wait",
"wait",
"finalize"
)
})
}
}
}
}
| ensime/scala-debugger | scala-debugger-api/src/it/scala-2.10/org/scaladebugger/api/profiles/java/info/JavaObjectInfoScala210IntegrationSpec.scala | Scala | apache-2.0 | 2,280 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.kryo
import java.util.{Collection => jCollection, List => jList, Map => jMap}
import com.esotericsoftware.kryo.io.Input
import com.vividsolutions.jts.geom.Geometry
import org.geotools.geometry.jts.ReferencedEnvelope
import org.geotools.process.vector.TransformProcess
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.features.SerializationOption._
import org.locationtech.geomesa.features.kryo.impl.KryoFeatureDeserialization
import org.locationtech.geomesa.features.serialization.ObjectType
import org.locationtech.geomesa.utils.geotools.ImmutableFeatureId
import org.opengis.feature.`type`.Name
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.feature.{GeometryAttribute, Property}
import org.opengis.filter.expression.PropertyName
import org.opengis.filter.identity.FeatureId
import org.opengis.geometry.BoundingBox
import scala.collection.JavaConversions._
object LazySimpleFeature {
val NULL_BYTE = 0.asInstanceOf[Byte]
}
class KryoBufferSimpleFeature(sft: SimpleFeatureType,
readers: Array[(Input) => AnyRef],
readUserData: (Input) => jMap[AnyRef, AnyRef],
options: Set[SerializationOption]) extends SimpleFeature {
private var offset: Int = _
private var length: Int = _
private val input = new Input
private val offsets = Array.ofDim[Int](sft.getAttributeCount)
private var startOfOffsets: Int = -1
private var missingAttributes: Boolean = false
private lazy val geomIndex = sft.indexOf(sft.getGeometryDescriptor.getLocalName)
private var userData: jMap[AnyRef, AnyRef] = _
private var userDataOffset: Int = -1
private var id: String = ""
private var transforms: String = _
private var transformSchema: SimpleFeatureType = _
private var binaryTransform: () => Array[Byte] = input.getBuffer
private var reserializeTransform: () => Array[Byte] = input.getBuffer
/**
* Creates a new feature for later use - does not copy attribute bytes
*
* @return
*/
def copy(): KryoBufferSimpleFeature = {
val sf = new KryoBufferSimpleFeature(sft, readers, readUserData, options)
if (transforms != null) {
sf.setTransforms(transforms, transformSchema)
}
sf
}
/**
* Transform the feature into a serialized byte array
*
* @return
*/
def transform(): Array[Byte] =
// if attributes have been added to the sft, we have to reserialize to get the null serialized values
if (missingAttributes) { reserializeTransform() } else { binaryTransform() }
/**
* Set the serialized bytes to use for reading attributes
*
* @param bytes serialized byte array
*/
def setBuffer(bytes: Array[Byte]): Unit = setBuffer(bytes, 0, bytes.length)
/**
* Set the serialized bytes to use for reading attributes
*
* @param bytes serialized byte array
* @param offset offset into the byte array of valid bytes
* @param length number of valid bytes to read from the byte array
*/
def setBuffer(bytes: Array[Byte], offset: Int, length: Int): Unit = {
this.offset = offset
this.length = length
input.setBuffer(bytes, offset, offset + length)
// reset our offsets
input.setPosition(offset + 1) // skip version
startOfOffsets = offset + input.readInt()
input.setPosition(startOfOffsets) // set to offsets start
var i = 0
while (i < offsets.length && input.position < input.limit) {
offsets(i) = offset + input.readInt(true)
i += 1
}
if (i < offsets.length) {
// attributes have been added to the sft since this feature was serialized
missingAttributes = true
do { offsets(i) = -1; i += 1 } while (i < offsets.length)
} else {
missingAttributes = false
}
userData = null
userDataOffset = input.position()
}
def setId(id: String): Unit = this.id = id
def setTransforms(transforms: String, transformSchema: SimpleFeatureType): Unit = {
this.transforms = transforms
this.transformSchema = transformSchema
val tdefs = TransformProcess.toDefinition(transforms)
// transforms by evaluating the transform expressions and then serializing the resulting feature
// we use this for transform expressions and for data that was written using an old schema
reserializeTransform = {
val serializer = KryoFeatureSerializer(transformSchema, options)
val sf = new ScalaSimpleFeature(transformSchema, "")
() => {
sf.setId(getID)
var i = 0
while (i < tdefs.size) {
sf.setAttribute(i, tdefs.get(i).expression.evaluate(this))
i += 1
}
serializer.serialize(sf)
}
}
val indices = tdefs.map { t =>
t.expression match {
case p: PropertyName => sft.indexOf(p.getPropertyName)
case _ => -1
}
}
val shouldReserialize = indices.contains(-1)
// if we are just returning a subset of attributes, we can copy the bytes directly and avoid creating
// new objects, reserializing, etc
binaryTransform = if (!shouldReserialize) {
val mutableOffsetsAndLength = Array.ofDim[(Int,Int)](indices.length)
() => {
// NOTE: the input buffer is the raw buffer. we need to ensure that we use the
// offset into the raw buffer rather than the raw buffer directly
val buf = input.getBuffer
var length = offsets(0) - this.offset // space for version, offset block and ID
var idx = 0
while(idx < mutableOffsetsAndLength.length) {
val i = indices(idx)
val l = (if (i < offsets.length - 1) offsets(i + 1) else startOfOffsets) - offsets(i)
length += l
mutableOffsetsAndLength(idx) = (offsets(i), l)
idx += 1
}
val dst = Array.ofDim[Byte](length)
// copy the version, offset block and id
var dstPos = offsets(0) - this.offset
System.arraycopy(buf, this.offset, dst, 0, dstPos)
mutableOffsetsAndLength.foreach { case (o, l) =>
System.arraycopy(buf, o, dst, dstPos, l)
dstPos += l
}
// note that the offset block is incorrect - we couldn't use this in another lazy feature
// but the normal serializer doesn't care
dst
}
} else {
reserializeTransform
}
}
def getDateAsLong(index: Int): Long = {
val offset = offsets(index)
if (offset == -1) {
0L
} else {
input.setPosition(offset)
KryoBufferSimpleFeature.longReader(input).asInstanceOf[Long]
}
}
override def getAttribute(index: Int): AnyRef = {
val offset = offsets(index)
if (offset == -1) {
null
} else {
input.setPosition(offset)
readers(index)(input)
}
}
def getInput(index: Int): Input = {
val offset = offsets(index)
if (offset == -1) {
null
} else {
input.setPosition(offset)
input
}
}
override def getType: SimpleFeatureType = sft
override def getFeatureType: SimpleFeatureType = sft
override def getName: Name = sft.getName
override def getIdentifier: FeatureId = new ImmutableFeatureId(getID)
override def getID: String = {
if (options.withoutId) { id } else {
input.setPosition(5)
input.readString()
}
}
override def getAttribute(name: Name): AnyRef = getAttribute(name.getLocalPart)
override def getAttribute(name: String): Object = {
val index = sft.indexOf(name)
if (index == -1) null else getAttribute(index)
}
override def getDefaultGeometry: AnyRef = getAttribute(geomIndex)
override def getAttributeCount: Int = sft.getAttributeCount
override def getBounds: BoundingBox = getDefaultGeometry match {
case g: Geometry => new ReferencedEnvelope(g.getEnvelopeInternal, sft.getCoordinateReferenceSystem)
case _ => new ReferencedEnvelope(sft.getCoordinateReferenceSystem)
}
override def getAttributes: jList[AnyRef] = {
val attributes = new java.util.ArrayList[AnyRef](offsets.length)
var i = 0
while (i < offsets.length) {
attributes.add(getAttribute(i))
i += 1
}
attributes
}
override def getUserData: jMap[AnyRef, AnyRef] = {
if (userData == null) {
input.setPosition(userDataOffset)
userData = readUserData(input)
}
userData
}
override def getDefaultGeometryProperty = throw new NotImplementedError
override def getProperties: jCollection[Property] = throw new NotImplementedError
override def getProperties(name: Name) = throw new NotImplementedError
override def getProperties(name: String) = throw new NotImplementedError
override def getProperty(name: Name) = throw new NotImplementedError
override def getProperty(name: String) = throw new NotImplementedError
override def getValue = throw new NotImplementedError
override def getDescriptor = throw new NotImplementedError
override def setAttribute(name: Name, value: Object) = throw new NotImplementedError
override def setAttribute(name: String, value: Object) = throw new NotImplementedError
override def setAttribute(index: Int, value: Object) = throw new NotImplementedError
override def setAttributes(vals: jList[Object]) = throw new NotImplementedError
override def setAttributes(vals: Array[Object]) = throw new NotImplementedError
override def setDefaultGeometry(geo: Object) = throw new NotImplementedError
override def setDefaultGeometryProperty(geoAttr: GeometryAttribute) = throw new NotImplementedError
override def setValue(newValue: Object) = throw new NotImplementedError
override def setValue(values: jCollection[Property]) = throw new NotImplementedError
override def isNillable = true
override def validate() = throw new NotImplementedError
override def toString = s"KryoBufferSimpleFeature:$getID"
}
object KryoBufferSimpleFeature {
val longReader = KryoFeatureDeserialization.matchReader(ObjectType.LONG)
}
| ronq/geomesa | geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/KryoBufferSimpleFeature.scala | Scala | apache-2.0 | 10,503 |
package lila.analyse
import akka.actor._
import akka.pattern.pipe
import com.typesafe.config.Config
import scala.util.{ Success, Failure }
import spray.caching.{ LruCache, Cache }
import lila.common.PimpedConfig._
final class Env(
config: Config,
db: lila.db.Env,
ai: ActorSelection,
system: ActorSystem,
indexer: ActorSelection,
modActor: ActorSelection) {
private val CollectionAnalysis = config getString "collection.analysis"
private val NetDomain = config getString "net.domain"
private val CachedNbTtl = config duration "cached.nb.ttl"
private val PaginatorMaxPerPage = config getInt "paginator.max_per_page"
private val ActorName = config getString "actor.name"
private[analyse] lazy val analysisColl = db(CollectionAnalysis)
lazy val analyser = new Analyser(
ai = ai,
indexer = indexer,
modActor = modActor,
limiter = new Limiter)
lazy val paginator = new PaginatorBuilder(
cached = cached,
maxPerPage = PaginatorMaxPerPage)
lazy val annotator = new Annotator(NetDomain)
lazy val cached = new {
private val cache: Cache[Int] = LruCache(timeToLive = CachedNbTtl)
def nbAnalysis: Fu[Int] = cache(true)(AnalysisRepo.count)
}
// api actor
system.actorOf(Props(new Actor {
def receive = {
case lila.hub.actorApi.ai.AutoAnalyse(gameId) =>
analyser.getOrGenerate(gameId, userId = "lichess", userIp = none, concurrent = true, auto = true)
}
}), name = ActorName)
def cli = new lila.common.Cli {
import tube.analysisTube
def process = {
case "analyse" :: "typecheck" :: Nil => lila.db.Typecheck.apply[Analysis](false)
}
}
}
object Env {
lazy val current = "analyse" boot new Env(
config = lila.common.PlayApp loadConfig "analyse",
db = lila.db.Env.current,
ai = lila.hub.Env.current.actor.ai,
system = lila.common.PlayApp.system,
indexer = lila.hub.Env.current.actor.gameSearch,
modActor = lila.hub.Env.current.actor.mod)
}
| r0k3/lila | modules/analyse/src/main/Env.scala | Scala | mit | 1,990 |
def factorial(n:Int) = (1 to n).reduceLeft(_ * _)
println(factorial(5))
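// Note (sketch): reduceLeft throws on an empty range, so factorial(0) would fail here;
// a variant such as (1 to n).foldLeft(1)(_ * _) would return 1 for n = 0.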
| Gerhut/scala-for-the-impatient | Chapter12/3.scala | Scala | unlicense | 73 |
package memnets.core
import memnets.model._
object Trial {
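  // Usage sketch (names invented for illustration): with a Model in implicit scope,
  // `Trial(time = 1000, name = "warmup")` appends a new trial to `model.trials`,
  // sets its name and time, and returns it.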
def apply(time: Int = -1, name: String = "")(implicit model: Model): Trial = {
val t = new Trial(model, model.trials.length)
model.trials += t
t.name = name
t.time = time
t
}
}
| MemoryNetworks/memnets | api/src/main/scala/memnets/core/Trial.scala | Scala | apache-2.0 | 262 |
package pureconfig
import java.util.regex.Pattern
import scala.util.matching.Regex
import org.scalactic.Equality
import org.scalactic.TypeCheckedTripleEquals._
package object equality {
implicit final val PatternEquality: Equality[Pattern] = new Equality[Pattern] {
def areEqual(a: Pattern, b: Any): Boolean =
b match {
case bp: Pattern => a.pattern === bp.pattern
case _ => false
}
}
implicit final val RegexEquality: Equality[Regex] = new Equality[Regex] {
override def areEqual(a: Regex, b: Any): Boolean =
b match {
case r: Regex => PatternEquality.areEqual(a.pattern, r.pattern)
case _ => false
}
}
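  // Illustrative use (a sketch, assuming ScalaTest/Scalactic `===` or `should equal` syntax
  // is in scope): with these instances, `"[a-z]+".r === "[a-z]+".r` compares the underlying
  // pattern strings rather than object identity, so structurally equal regexes are equal.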
}
| pureconfig/pureconfig | testkit/src/main/scala/pureconfig/equality/package.scala | Scala | mpl-2.0 | 685 |
package thangiee.riotapi.matchhistory
case class ParticipantTimeline(
ancientGolemAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
ancientGolemKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
assistedLaneDeathsPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
assistedLaneKillsPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
baronAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
baronKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
creepsPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
csDiffPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
damageTakenDiffPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
damageTakenPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
dragonAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
dragonKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
elderLizardAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
elderLizardKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
goldPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
inhibitorAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
inhibitorKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
lane: String = "",
role: String = "",
towerAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
towerKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
towerKillsPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
vilemawAssistsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
vilemawKillsPerMinCounts: ParticipantTimelineData = ParticipantTimelineData(),
wardsPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
xpDiffPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData(),
xpPerMinDeltas: ParticipantTimelineData = ParticipantTimelineData()
)
| Thangiee/Riot-API-Scala | src/main/scala/thangiee/riotapi/matchhistory/ParticipantTimeline.scala | Scala | mit | 2,163 |
/*
* Copyright 2016-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalaexercises.evaluator
import cats.effect.IO
import coursier.util.Sync
import coursier.interop.cats._
import scala.concurrent.ExecutionContext
trait Implicits {
val EC = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(10))
implicit val timer = IO.timer(EC)
implicit val CS = IO.contextShift(EC)
implicit val sync = Sync[IO]
}
| scala-exercises/evaluator | server/src/test/scala/org/scalaexercises/evaluator/Implicits.scala | Scala | apache-2.0 | 1,012 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.precog
import java.util.concurrent.atomic.AtomicInteger
package object util {
/**
* Opaque symbolic identifier (like Int, but better!).
*/
final class Identifier extends AnyRef
// Shared Int could easily overflow: Unshare? Extend to a Long? Different approach?
object IdGen extends IdGen
class IdGen {
private[this] val currentId = new AtomicInteger(0)
def nextInt(): Int = currentId.getAndIncrement()
}
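  // Usage sketch: `IdGen.nextInt()` draws 0, 1, 2, ... from the shared global counter,
  // while `new IdGen` gives an independent counter (illustrative, not from the source).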
}
| drostron/quasar | blueeyes/src/main/scala/quasar/precog/util/package.scala | Scala | apache-2.0 | 1,058 |
package gapt.examples
import java.io._
import gapt.formats.llk.short._
import gapt.formats.tptp.TptpHOLExporter
import gapt.proofs.HOLSequent
/**
* The object nTape6 generates hard problems for higher order theorem provers containing an axiomatization of
* if-then-else.
* Formulas:
* f1,f2 ... if-then-else axiomatizations
* f3,f4 ... properties of the successor function (0 is no successor and
* a number is always different from its successor)
* conclusion0 ... there exists a function h s.t. h(0) = 1, h(1) = 0
* conclusion1 ... there exists a function h s.t. h(0) = 1, h(1) = 0, h(2) = 0
* conclusion2 ... there exists a function h s.t. h(0) = 1, h(1) = 0, h(2) = 1
* w1 ... witness for sc
* w2 ... witness for sc2
*
* The problems are (in sequent notation):
*
* P0: f1, f2 :- conclusion0
* P1: f1, f2, f3, f4 :- conclusion1
* P2: f1, f2, f3, f4 :- conclusion2
*
* The generated filenames are "ntape6-i-without-witness.tptp" for i = 0 to 2.
*
* To show that there are actual witnesses for the function h, we provide a witness, where the witness w1 can be used
* for both W0 and W1:
*
* W0: { w1 :- } x P0
* W1: { w1 :- } x P1
* W2: { w2 :- } x P2
*
* The generated filenames are "ntape6-i-with-witness.tptp" for i = 0 to 2.
*/
object nTape6 {
/**
* Contains all the formulas used.
*/
object formulas {
implicit val signature =
sig(
"""var X:o; var U,V:i; var H:i>i; var x,y:i;
const zero:i; const s:i>i; const h:i>i;
const ite : o > (i > (i>i));""" )
val s1 = "(all X all U all V (X -> ite(X,U,V) = U))"
val s2 = "(all X all U all V (-X -> ite(X,U,V) = V))"
val s3 = "(all x -(s(x) = zero))"
val s4 = "(all x -(s(x) = x))"
val s5 = "(all x (h(x) = ite((x=zero), s(zero), zero) ))"
val s6 = "(all x (h(x) = ite((x=s(zero)), zero, s(zero)) ))"
val s7 =
"""(all x (h(x) = ite((x=zero), s(zero),
ite((x=s(zero)), zero, s(zero) ))))"""
val sc = "(exists H (H(zero)=s(zero) & H(s(zero))=zero ))"
val sc1 = "(exists H (H(zero)=s(zero) & H(s(zero))=zero & H(s(s(zero))) = zero))"
val sc2 = "(exists H (H(zero)=s(zero) & H(s(zero))=zero & H(s(s(zero))) = s(zero)))"
val List( f1, f2, f3, f4, w1, w2, w3, conclusion0, conclusion1, conclusion2 ) =
List( s1, s2, s3, s4, s5, s6, s7, sc, sc1, sc2 ).map( hof( _ ) )
}
/**
* Contains all the conjecture sequents used.
*/
object sequents {
import formulas._
val s0a = HOLSequent( f1 :: f2 :: Nil, conclusion0 :: Nil )
val s0b = HOLSequent( f1 :: f2 :: formulas.w1 :: Nil, conclusion0 :: Nil )
val s1a = HOLSequent( f1 :: f2 :: Nil, conclusion1 :: Nil )
val s1b = HOLSequent( f1 :: f2 :: formulas.w1 :: Nil, conclusion1 :: Nil )
val s1c = HOLSequent( f1 :: f2 :: f3 :: f4 :: formulas.w1 :: Nil, conclusion1 :: Nil )
val s1d = HOLSequent( f1 :: f2 :: f3 :: f4 :: Nil, conclusion1 :: Nil )
val s2b = HOLSequent( f1 :: f2 :: f4 :: formulas.w2 :: Nil, conclusion2 :: Nil )
val s2c = HOLSequent( f1 :: f2 :: f4 :: Nil, conclusion2 :: Nil )
val s2d = HOLSequent( f1 :: f2 :: w3 :: Nil, conclusion2 :: Nil )
val s2e = HOLSequent( f1 :: f2 :: f3 :: f4 :: w3 :: Nil, conclusion2 :: Nil )
val consistent = HOLSequent( f1 :: f2 :: f3 :: f4 :: formulas.w1 :: conclusion0 :: Nil, Nil )
val cuts0a = HOLSequent( f1 :: f2 :: f3 :: f4 :: Nil, formulas.w1 :: Nil )
val cuts0b = HOLSequent( f3 :: f4 :: formulas.w1 :: Nil, conclusion0 :: Nil )
}
import sequents._
val fn = "ntape6-"
/**
* Problem 0: sequence (0,1)
*/
val p0 = s0a
/**
* Problem 1: sequence (1,0,0)
*/
val p1 = s1a
/**
   * Problem 2: sequence (1,0,1)
*/
val p2 = s2c
/**
* Problem 0 with witness: sequence (0,1)
*/
val w0 = s0b
/**
   * Problem 1 with witness: sequence (1,0,0)
*/
val w1 = s1c
/**
* Problem 2 with witness: sequence (1,0,1)
*/
  val w2 = s2b
/**
* Problem 2 with different witness: sequence (1,0,1)
*/
val w2b = s2e
/**
* Export the problems P0-P2 and W0-W2 to TPTP THF.
*
* @param path
*/
def export( path: String = ".", separate_axioms: Boolean = false ): Unit = {
val f = path + File.separator + fn
//sc
TptpHOLExporter( s0a, f + "0-minimal.tptp", separate_axioms ) //provable by agsyhol
TptpHOLExporter( s0b, f + "0-with-witness.tptp", separate_axioms ) //provable by agsyhol
//sc1
TptpHOLExporter( s1a, f + "1-minimal.tptp", separate_axioms ) //timeout
TptpHOLExporter( s1b, f + "1-withness-no-arith.tptp", separate_axioms ) //timeout
TptpHOLExporter( s1c, f + "1-with-witness.tptp", separate_axioms ) //provable by leo 2, satallax, agsyhol
TptpHOLExporter( s1d, f + "1-without-witness.tptp", separate_axioms ) //timeout
//sc2
TptpHOLExporter( s2b, f + "2-with-witness.tptp", separate_axioms ) //provable by leo 2, satallax
TptpHOLExporter( s2c, f + "2-without-witness.tptp", separate_axioms ) //timeout
//sc2 with different witness
TptpHOLExporter( s2d, f + "2-with-witness2.tptp", separate_axioms ) //provable by leo 2, satallax
//TPTPHOLExporter( s2e , f + "2-with-witness2-help.tptp" , separate_axioms ) //provable by leo 2, satallax
//these are some experiments
//TPTPHOLExporter( cuts0a , f + "0-cut1.tptp" , separate_axioms )
//TPTPHOLExporter( cuts0b , f + "0-cut2.tptp" , separate_axioms )
//TPTPHOLExporter( consistent , f + "0-consistent.tptp" , separate_axioms )
}
}
| gapt/gapt | examples/ntape/nTape6.scala | Scala | gpl-3.0 | 5,541
package poly.algebra.std
import poly.algebra._
/**
* @author Tongfei Chen
*/
object IntStructure extends EuclideanDomain[Int] with BoundedLattice[Int] with SequentialOrder[Int] with OrderedHashing[Int] {
final def hash(x: Int) = x.##
final val bot: Int = Int.MinValue
final val top: Int = Int.MaxValue
def pred(x: Int) = x - 1
def succ(x: Int) = x + 1
override def predN(x: Int, n: Int) = x - n
override def succN(x: Int, n: Int) = x + n
def cmp(x: Int, y: Int) = x - y
override def eq(x: Int, y: Int) = x == y
override def le(x: Int, y: Int) = x <= y
override def lt(x: Int, y: Int) = x < y
override def ge(x: Int, y: Int) = x >= y
override def gt(x: Int, y: Int) = x > y
override def max[Y <: Int](x: Y, y: Y) = if (x > y) x else y
override def min[Y <: Int](x: Y, y: Y) = if (x < y) x else y
final val zero = 0
final val one = 1
  //TODO: ! `final` causes the compiler to generate a class that, when loaded, throws a ClassFormatException (SI-9486)
override val negOne = -1
def add(x: Int, y: Int) = x + y
def neg(x: Int) = -x
override def sub(x: Int, y: Int) = x - y
def mul(x: Int, y: Int) = x * y
def div(x: Int, y: Int) = x / y
def mod(x: Int, y: Int) = x % y
override def abs(x: Int) = Math.abs(x)
override def sgn(x: Int) = if (x == 0) 0 else if (x > 0) 1 else -1
override def dist(x: Int, y: Int) = Math.abs(x - y)
}
}
| ctongfei/poly-algebra | src/main/scala/poly/algebra/std/IntStructure.scala | Scala | mit | 1,383
/**
* License
* =======
*
* The MIT License (MIT)
*
*
* Copyright (c) 2017 Antoine DOERAENE @sherpal
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package gui
trait LayeredRegion extends Region {
def draw(): Unit
private var _drawLayer: Layer = Artwork
private var _drawSubLayer: Int = 0
/** Returns the draw layer of the Region. */
def drawLayer: Layer = _drawLayer
/** Returns the draw sub layer of the Region. */
def drawSubLayer: Int = _drawSubLayer
/** Sets the draw layer and sublayer of the Region. */
def setDrawLayer(layer: Layer, subLayer: Int): Unit = {
_drawLayer = layer
_drawSubLayer = subLayer
parent match {
case Some(p) => p.orderLayeredRegions()
case _ =>
if (scala.scalajs.LinkingInfo.developmentMode) {
println("Something weird, LayeredRegion does not have parent...")
}
}
}
/** Sets the layer of the region, and the sublayer to 9 if Region is a FontString, 0 otherwise. */
def setDrawLayer(layer: Layer): Unit = setDrawLayer(layer, this match {
case _: FontString => 9
case _ => 0
})
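  // Usage sketch (illustrative): `region.setDrawLayer(Artwork, 3)` moves the region to
  // sub-layer 3 of the Artwork layer and asks the parent to re-order its layered regions.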
protected var _red: Double = 1.0
protected var _green: Double = 1.0
protected var _blue: Double = 1.0
/** Sets the colours and the alpha of the Region. */
def setVertexColor(red: Double = 1.0, green: Double = 1.0, blue: Double = 1.0, alpha: Double = 1.0): Unit = {
_red = if (red < 0) 0.0 else if (red > 1) 1.0 else red
_green = if (green < 0) 0.0 else if (green > 1) 1.0 else green
_blue = if (blue < 0) 0.0 else if (blue > 1) 1.0 else blue
setAlpha(alpha)
}
}
| sherpal/scalajs-ui | src/main/scala/gui/LayeredRegion.scala | Scala | mit | 2,711 |
package dotty.tools
package dotc
package interactive
import java.net.URI
import java.io._
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes
import java.util.zip._
import scala.collection._
import scala.io.Codec
import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, PlainFile, VirtualFile }
import ast.{Trees, tpd}
import core._, core.Decorators._
import Contexts._, Names._, NameOps._, Symbols._, SymDenotations._, Trees._, Types._
import classpath._
import reporting._
import util._
/** A Driver subclass designed to be used from IDEs */
class InteractiveDriver(val settings: List[String]) extends Driver {
import tpd._
override def sourcesRequired: Boolean = false
private val myInitCtx: Context = {
val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive).addMode(Mode.ReadComments)
rootCtx.setSetting(rootCtx.settings.YretainTrees, true)
rootCtx.setSetting(rootCtx.settings.YcookComments, true)
val ctx = setup(settings.toArray, rootCtx)._2
ctx.initialize()(ctx)
ctx
}
private var myCtx: Context = myInitCtx
def currentCtx: Context = myCtx
private val compiler: Compiler = new InteractiveCompiler
private val myOpenedFiles = new mutable.LinkedHashMap[URI, SourceFile] {
override def default(key: URI) = NoSource
}
def openedFiles: Map[URI, SourceFile] = myOpenedFiles
private val myOpenedTrees = new mutable.LinkedHashMap[URI, List[SourceTree]] {
override def default(key: URI) = Nil
}
def openedTrees: Map[URI, List[SourceTree]] = myOpenedTrees
private val myCompilationUnits = new mutable.LinkedHashMap[URI, CompilationUnit]
def compilationUnits: Map[URI, CompilationUnit] = myCompilationUnits
// Presence of a file with one of these suffixes indicates that the
// corresponding class has been pickled with TASTY.
private val tastySuffixes = List(".hasTasty", ".tasty")
// FIXME: All the code doing classpath handling is very fragile and ugly,
// improving this requires changing the dotty classpath APIs to handle our usecases.
// We also need something like sbt server-mode to be informed of changes on
// the classpath.
private val (zipClassPaths, dirClassPaths) = currentCtx.platform.classPath(currentCtx) match {
case AggregateClassPath(cps) =>
// FIXME: We shouldn't assume that ClassPath doesn't have other
// subclasses. For now, the only other subclass is JrtClassPath on Java
// 9+, we can safely ignore it for now because it's only used for the
// standard Java library, but this will change once we start supporting
// adding entries to the modulepath.
val zipCps = cps.collect { case cp: ZipArchiveFileLookup[?] => cp }
val dirCps = cps.collect { case cp: JFileDirectoryLookup[?] => cp }
(zipCps, dirCps)
case _ =>
(Seq(), Seq())
}
// Like in `ZipArchiveFileLookup` we assume that zips are immutable
private val zipClassPathClasses: Seq[TypeName] = {
val names = new mutable.ListBuffer[TypeName]
for (cp <- zipClassPaths)
classesFromZip(cp.zipFile, names)
names
}
initialize()
/**
* The trees for all the source files in this project.
*
* This includes the trees for the buffers that are presently open in the IDE, and the trees
* from the target directory.
*/
def sourceTrees(implicit ctx: Context): List[SourceTree] = sourceTreesContaining("")
/**
* The trees for all the source files in this project that contain `id`.
*
* This includes the trees for the buffers that are presently open in the IDE, and the trees
* from the target directory.
*/
def sourceTreesContaining(id: String)(implicit ctx: Context): List[SourceTree] = {
val fromBuffers = openedTrees.values.flatten.toList
val fromCompilationOutput = {
val classNames = new mutable.ListBuffer[TypeName]
val output = ctx.settings.outputDir.value
if (output.isDirectory)
classesFromDir(output.jpath, classNames)
else
classesFromZip(output.file, classNames)
classNames.flatMap { cls =>
treesFromClassName(cls, id)
}
}
(fromBuffers ++ fromCompilationOutput).distinct
}
/**
* All the trees for this project.
*
* This includes the trees of the sources of this project, along with the trees that are found
* on this project's classpath.
*/
def allTrees(implicit ctx: Context): List[SourceTree] = allTreesContaining("")
/**
* All the trees for this project that contain `id`.
*
* This includes the trees of the sources of this project, along with the trees that are found
* on this project's classpath.
*/
def allTreesContaining(id: String)(implicit ctx: Context): List[SourceTree] = {
val fromSource = openedTrees.values.flatten.toList
val fromClassPath = (dirClassPathClasses ++ zipClassPathClasses).flatMap { cls =>
treesFromClassName(cls, id)
}
(fromSource ++ fromClassPath).distinct
}
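  // Illustrative call sequence (a sketch, not from the original source): an IDE typically
  // calls `run(uri, sourceText)` on each buffer edit to collect diagnostics, then reads
  // `openedTrees(uri)` / `compilationUnits(uri)` for that file, and uses
  // `sourceTreesContaining` / `allTreesContaining` for project- or classpath-wide lookups.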
def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, toSource(uri, sourceCode))
def run(uri: URI, source: SourceFile): List[Diagnostic] = {
val previousCtx = myCtx
try {
val reporter =
new StoreReporter(null) with UniqueMessagePositions with HideNonSensicalMessages
val run = compiler.newRun(myInitCtx.fresh.setReporter(reporter))
myCtx = run.runContext
implicit val ctx = myCtx
myOpenedFiles(uri) = source
run.compileSources(List(source))
run.printSummary()
val unit = if ctx.run.units.nonEmpty then ctx.run.units.head else ctx.run.suspendedUnits.head
val t = unit.tpdTree
cleanup(t)
myOpenedTrees(uri) = topLevelTrees(t, source)
myCompilationUnits(uri) = unit
reporter.removeBufferedMessages
}
catch {
case ex: FatalError =>
myCtx = previousCtx
close(uri)
Nil
}
}
def close(uri: URI): Unit = {
myOpenedFiles.remove(uri)
myOpenedTrees.remove(uri)
myCompilationUnits.remove(uri)
}
/**
* The `SourceTree`s that define the class `className` and/or module `className`.
*
* @see SourceTree.fromSymbol
*/
private def treesFromClassName(className: TypeName, id: String)(implicit ctx: Context): List[SourceTree] = {
def trees(className: TypeName, id: String): List[SourceTree] = {
val clsd = ctx.base.staticRef(className)
clsd match {
case clsd: ClassDenotation =>
clsd.ensureCompleted()
SourceTree.fromSymbol(clsd.symbol.asClass, id)
case _ =>
Nil
}
}
trees(className, id) ::: trees(className.moduleClassName, id)
}
// FIXME: classfiles in directories may change at any point, so we retraverse
// the directories each time, if we knew when classfiles changed (sbt
// server-mode might help here), we could do cache invalidation instead.
private def dirClassPathClasses: Seq[TypeName] = {
val names = new mutable.ListBuffer[TypeName]
dirClassPaths.foreach { dirCp =>
val root = dirCp.dir.toPath
classesFromDir(root, names)
}
names
}
/** Adds the names of the classes that are defined in `file` to `buffer`. */
private def classesFromZip(file: File, buffer: mutable.ListBuffer[TypeName]): Unit = {
val zipFile = new ZipFile(file)
try {
val entries = zipFile.entries()
while (entries.hasMoreElements) {
val entry = entries.nextElement()
val name = entry.getName
tastySuffixes.find(name.endsWith) match {
case Some(tastySuffix) =>
buffer += name.replace("/", ".").stripSuffix(tastySuffix).toTypeName
case _ =>
}
}
}
finally zipFile.close()
}
/** Adds the names of the classes that are defined in `dir` to `buffer`. */
private def classesFromDir(dir: Path, buffer: mutable.ListBuffer[TypeName]): Unit =
try
Files.walkFileTree(dir, new SimpleFileVisitor[Path] {
override def visitFile(path: Path, attrs: BasicFileAttributes) = {
if (!attrs.isDirectory) {
val name = path.getFileName.toString
for {
tastySuffix <- tastySuffixes
if name.endsWith(tastySuffix)
}
{
buffer += dir.relativize(path).toString.replace("/", ".").stripSuffix(tastySuffix).toTypeName
}
}
FileVisitResult.CONTINUE
}
})
catch {
case _: NoSuchFileException =>
}
private def topLevelTrees(topTree: Tree, source: SourceFile): List[SourceTree] = {
val trees = new mutable.ListBuffer[SourceTree]
def addTrees(tree: Tree): Unit = tree match {
case PackageDef(_, stats) =>
stats.foreach(addTrees)
case imp: Import =>
trees += SourceTree(imp, source)
case tree: TypeDef =>
trees += SourceTree(tree, source)
case _ =>
}
addTrees(topTree)
trees.toList
}
/** Remove attachments and error out completers. The goal is to avoid
* having a completer hanging in a typed tree which can capture the context
* of a previous run. Note that typed trees can have untyped or partially
* typed children if the source contains errors.
*/
private def cleanup(tree: tpd.Tree)(implicit ctx: Context): Unit = {
val seen = mutable.Set.empty[tpd.Tree]
def cleanupTree(tree: tpd.Tree): Unit = {
seen += tree
tree.foreachSubTree { t =>
if (t.symbol.exists && t.hasType) {
if (!t.symbol.isCompleted) t.symbol.info = UnspecifiedErrorType
t.symbol.annotations.foreach { annot =>
            /* In some cases annotations are used on themselves (possibly in larger cycles).
             * This is the case with the java.lang.annotation.Target annotation, which would end
             * in an infinite loop while cleaning. The `seen` set ensures that those
             * trees are not cleaned twice.
* TODO: Find a less expensive way to check for those cycles.
*/
if (annot.isEvaluated && !seen(annot.tree))
cleanupTree(annot.tree)
}
}
t.removeAllAttachments()
}
}
cleanupTree(tree)
}
private def toSource(uri: URI, sourceCode: String): SourceFile = {
val path = Paths.get(uri)
val virtualFile = new VirtualFile(path.getFileName.toString, path.toString)
val writer = new BufferedWriter(new OutputStreamWriter(virtualFile.output, "UTF-8"))
writer.write(sourceCode)
writer.close()
new SourceFile(virtualFile, Codec.UTF8)
}
/**
* Initialize this driver and compiler.
*
* This is necessary because an `InteractiveDriver` can be put to work without having
* compiled anything (for instance, resolving a symbol coming from a different compiler in
* this compiler). In those cases, an un-initialized compiler may crash (for instance if
* late-compilation is needed).
*/
private def initialize(): Unit = {
val run = compiler.newRun(myInitCtx.fresh)
myCtx = run.runContext
run.compileUnits(Nil, myCtx)
}
}
object InteractiveDriver {
def toUriOption(file: AbstractFile): Option[URI] =
if (!file.exists)
None
else
try
// We don't use file.file here since it'll be null
// for the VirtualFiles created by InteractiveDriver#toSource
// TODO: To avoid these round trip conversions, we could add an
// AbstractFile#toUri method and implement it by returning a constant
// passed as a parameter to a constructor of VirtualFile
Some(Paths.get(file.path).toUri)
catch {
case e: InvalidPathException =>
None
}
def toUriOption(source: SourceFile): Option[URI] =
if (!source.exists)
None
else
toUriOption(source.file)
}
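// Illustrative usage sketch (not part of the original source): a language-server client that has
// constructed the driver elsewhere with its compiler settings might exercise the API above roughly
// like this; `driver`, `uri` and `code` are placeholder names, and `currentCtx` is an assumed
// accessor for the context of the latest run.
//
//   val driver: InteractiveDriver = ???               // built with the desired compiler settings
//   val diagnostics = driver.run(uri, code)           // type-check the open buffer, collect errors
//   implicit val ctx: Context = driver.currentCtx
//   val trees = driver.allTreesContaining("Foo")      // source + classpath trees mentioning "Foo"
//   driver.close(uri)                                 // drop the cached state for that buffer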
| som-snytt/dotty | compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala | Scala | apache-2.0 | 11,842 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.israel.sbt.zookeeper
import java.io.{FileOutputStream, InputStream}
import sbt._
import sbt.Keys._
import cmwell.build.util._
/**
* Created by Israel Klein on 28/12/2015.
*/
object SbtZookeeperPlugin extends sbt.AutoPlugin{
object autoImport {
lazy val helloTask = taskKey[Unit]("prints hello world")
/** Settings **/
lazy val zookeeperVersion = settingKey[String]("version of zookeeper")
lazy val zookeeperServerConfig = settingKey[File]("zookeeper server configuration file")
lazy val zookeeperServerRunDir = settingKey[File]("Run zookeeper server process from this directory. ")
lazy val stopAfterTests = settingKey[Boolean]("Stop zookeeper server after tests finish")
lazy val startBeforeTests = settingKey[Boolean]("Auto start zookeeper server before tests start")
lazy val cleanAfterTests = settingKey[Boolean]("Clean data after tests finish")
lazy val cleanBeforeTests = settingKey[Boolean]("Clean data before test starts")
/** Tasks **/
lazy val startZookeeper = taskKey[Unit]("start the zookeeper server")
lazy val stopZookeeper = taskKey[Unit]("stop the zookeeper server")
lazy val cleanZookeeper = taskKey[Unit]("clean zookeeper run dir")
lazy val cleanZookeeperFunc = taskKey[()=>Unit]("return a function that clean zookeeper's run dir")
}
override def requires = cmwell.build.CMWellBuild
lazy val Zookeeper = config("zk") extend(Compile) describedAs("Dependencies for using Zookeeper.")
override def projectConfigurations = Seq(Zookeeper)
import autoImport._
var zookeeperProcess:java.lang.Process = null
private def doCleanZookeeperBeforeTest = Def.taskDyn{
streams.value.log.error("doCleanZookeeperBeforeTests")
if(cleanBeforeTests.value)
cleanZookeeper
else
Def.task{}
}
private def doCleanZookeeperAfterTest = Def.taskDyn{
streams.value.log.error("doCleanZookeeperAfterTests")
if(cleanAfterTests.value)
cleanZookeeper
else
Def.task{}
}
private def doStartZookeeperBeforeTest = Def.taskDyn{
streams.value.log.error("doStartZookeeperBeforeTests")
if(startBeforeTests.value)
startZookeeper
else
Def.task{}
}
private def doStopZookeeperAfterTest = Def.taskDyn{
streams.value.log.error("doStopZookeeperBeforeTests")
if(stopAfterTests.value)
stopZookeeper
else
Def.task{}
}
private def isZookeeperRunning:Boolean = {
val p = sys.runtime.exec("jps -l")
val lines = scala.io.Source.fromInputStream(p.getInputStream).getLines()
lines.exists(_.contains("org.apache.zookeeper.server.quorum.QuorumPeerMain"))
}
private def killZookeeper(force:Boolean = false)(implicit logger:Logger) = {
val p = sys.runtime.exec("jps -l")
val lines = scala.io.Source.fromInputStream(p.getInputStream).getLines()
val pidOpt = lines.collectFirst({case s if (s.contains("org.apache.zookeeper.server.quorum.QuorumPeerMain")) => s.split(" ")(0)})
pidOpt match {
case Some(pid) =>
        val command = if (force) s"kill -9 $pid" else s"kill $pid"
sys.runtime.exec(command)
case None => logger.debug("requested to kill zookeeper process but none was found")
}
}
override def projectSettings = Seq(
/** Settings **/
zookeeperVersion := "3.4.7",
libraryDependencies += "org.apache.zookeeper" % "zookeeper" % zookeeperVersion.value % Zookeeper,
zookeeperServerConfig := (resourceDirectory in Runtime).value / "zookeeper.server.cfg",
zookeeperServerRunDir := {
val f = target.value / "zookeeper-server"
f.mkdir()
f
},
stopAfterTests := true,
startBeforeTests := true,
cleanAfterTests := false,
cleanBeforeTests := true,
/** Tasks **/
externalDependencyClasspath in Zookeeper := (externalDependencyClasspath or (externalDependencyClasspath in Runtime)).value,
startZookeeper := {
val logger = streams.value.log
logger.info("preparing to start ZooKeeper")
val depClasspath = (externalDependencyClasspath in Zookeeper).value
if(isZookeeperRunning)
logger.info("zookeeper is already running. doing nothing")
else {
val baseDir = zookeeperServerRunDir.value
if (!baseDir.isDirectory)
baseDir.mkdir()
val classpath = Attributed.data(depClasspath)
val serverConfigFile = zookeeperServerConfig.value
if (!serverConfigFile.exists()) {
val is: InputStream = resourceFromJarAsIStream("zookeeper.server.cfg")
val fos = new FileOutputStream(serverConfigFile)
IO.transferAndClose(is, fos)
fos.close()
}
val configFile = serverConfigFile.absolutePath
val cp = classpath.map(_.getAbsolutePath).mkString(":")
val javaExec = System.getProperty("java.home") + "/bin/java"
val mainClass = "org.apache.zookeeper.server.quorum.QuorumPeerMain"
val pb = new java.lang.ProcessBuilder(javaExec, "-classpath", cp, mainClass, configFile).inheritIO()
pb.directory(baseDir)
zookeeperProcess = pb.start()
// This is a temp solution for waiting for zookeeper to be ready
Thread.sleep(10000)
if(isZookeeperRunning)
logger.info("successfuly started zookeeper process")
else {
logger.error("failed to start zookeeper process")
}
}
},
stopZookeeper := {
implicit val logger = streams.value.log
logger.info("preparing to stop zookeeper process")
if(zookeeperProcess != null)
zookeeperProcess.destroy()
else
killZookeeper()
var triesLeft = 20
while(isZookeeperRunning && triesLeft > 0) {
logger.info("waiting 500ms for zookeeper process to finish...")
Thread.sleep(500)
triesLeft -= 1
}
if(triesLeft == 0) {
logger.error("failed to stop zookeeper process nicely, using the heavy guns...")
killZookeeper(true)
logger.info("zookeeper process was forcefully killed (-9)")
} else {
logger.info("zookeeper process successfully stopped")
}
zookeeperProcess = null
},
cleanZookeeper := {
cleanZookeeperFunc.value()
},
cleanZookeeperFunc := {
() => {
val dir = zookeeperServerRunDir.value
IO.delete(dir)
}
}
)
}
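// Hypothetical build.sbt wiring (illustrative only, not part of the plugin sources): a project that
// enables this AutoPlugin can rely on the defaults declared in projectSettings above or override
// them, and can invoke the tasks directly from the sbt shell (`startZookeeper`, `stopZookeeper`,
// `cleanZookeeper`).
//
//   lazy val myService = (project in file("my-service"))
//     .enablePlugins(SbtZookeeperPlugin)
//     .settings(
//       zookeeperVersion := "3.4.7",
//       startBeforeTests := true,
//       stopAfterTests := true
//     )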
| hochgi/CM-Well | server/project/sbt-zookeeper-plugin/src/main/scala/SbtZookeeperPlugin.scala | Scala | apache-2.0 | 6,988 |
package org.lnu.is.integration.cases.partner
import java.util.UUID
import scala.concurrent.duration.DurationInt
import io.gatling.core.Predef.checkBuilder2Check
import io.gatling.core.Predef.findCheckBuilder2ValidatorCheckBuilder
import io.gatling.core.Predef.exec
import io.gatling.core.Predef.stringToExpression
import io.gatling.core.Predef.validatorCheckBuilder2CheckBuilder
import io.gatling.core.Predef.value2Expression
import io.gatling.core.Predef.value2Success
import io.gatling.http.Predef.ELFileBody
import io.gatling.http.Predef.http
import io.gatling.http.Predef.jsonPath
import io.gatling.http.Predef.status
object PartnerIntegrationTest {
val testCase = exec(session => {
session
.set("partnerName", UUID.randomUUID())
.set("partnerManager", UUID.randomUUID())
.set("newPartnerManager", UUID.randomUUID())
.set("partnerPhone", UUID.randomUUID())
.set("partnerEmail", UUID.randomUUID())
.set("partnerAbbr", UUID.randomUUID())
})
.exec(http("Post Partner")
.post("/partners")
.header("Content-Type", "application/json")
.body(ELFileBody("data/partner/post.json"))
.asJSON
.check(status.is(201))
.check(jsonPath("$.id").find.saveAs("partnerId")))
.exec(http("Get Partner")
.get("/partners/${partnerId}")
.basicAuth("admin", "nimda")
.check(status.is(200)))
.exec(http("Update Partner")
.put("/partners/${partnerId}")
.basicAuth("admin", "nimda")
.header("Content-Type", "application/json")
.body(ELFileBody("data/partner/put.json"))
.asJSON
.check(status.is(200)))
.exec(http("Get Partner")
.get("/partners/${partnerId}")
.basicAuth("admin", "nimda")
.check(status.is(200))
.check(jsonPath("$.manager").find.is("${newPartnerManager}")))
.exec(http("Delete Partner")
.delete("/partners/${partnerId}")
.basicAuth("admin", "nimda")
.check(status.is(204)))
.exec(http("Get Partner")
.get("/partners/${partnerId}")
.basicAuth("admin", "nimda")
.check(status.is(404)))
} | ifnul/ums-backend | is-lnu-integration/src/test/scala/org/lnu/is/integration/cases/partner/PartnerIntegrationTest.scala | Scala | apache-2.0 | 2,147 |
object Initialization5 {
case class NoThis() {
val nothis1 = f()
val nothis2 = 0
def f() = g(this)
def g(nt: NoThis) = nt.nothis2
}
}
| epfl-lara/stainless | frontends/benchmarks/extraction/invalid/Initialization5.scala | Scala | apache-2.0 | 156 |
package com.twitter.finagle.mysql.protocol
object Type {
/** MySQL type codes */
val DECIMAL = 0x00;
val TINY = 0x01;
val SHORT = 0x02;
val LONG = 0x03;
val FLOAT = 0x04;
val DOUBLE = 0x05;
val NULL = 0x06;
val TIMESTAMP = 0x07;
val LONGLONG = 0x08;
val INT24 = 0x09;
val DATE = 0x0a;
val TIME = 0x0b;
val DATETIME = 0x0c;
val YEAR = 0x0d;
val NEWDATE = 0x0e;
val VARCHAR = 0x0f;
val BIT = 0x10;
val NEWDECIMAL = 0xf6;
val ENUM = 0xf7;
val SET = 0xf8;
val TINY_BLOB = 0xf9;
val MEDIUM_BLOB = 0xfa;
val LONG_BLOB = 0xfb;
val BLOB = 0xfc;
val VAR_STRING = 0xfd;
val STRING = 0xfe;
val GEOMETRY = 0xff;
/**
* Returns the sizeof the given parameter in
* its MySQL binary representation. If the size
* is unknown -1 is returned.
*/
def sizeOf(any: Any) = any match {
case s: String => Buffer.sizeOfLen(s.size) + s.size
case b: Array[Byte] => Buffer.sizeOfLen(b.size) + b.size
case b: Boolean => 1
case b: Byte => 1
case s: Short => 2
case i: Int => 4
case l: Long => 8
case f: Float => 4
case d: Double => 8
case null => 0
// Date and Time
case t: java.sql.Timestamp => 12
case d: java.sql.Date => 5
case d: java.util.Date => 12
case _ => -1
}
/**
* Retrieves the MySQL type code for the
* given parameter. If the parameter type
* mapping is unknown -1 is returned.
*/
def getCode(any: Any) = any match {
// primitives
case s: String => VARCHAR
case b: Boolean => TINY
case b: Byte => TINY
case s: Short => SHORT
case i: Int => LONG
case l: Long => LONGLONG
case f: Float => FLOAT
case d: Double => DOUBLE
case null => NULL
// blobs
case b: Array[Byte] if b.size <= 255 => TINY_BLOB
case b: Array[Byte] if b.size <= 65535 => BLOB
case b: Array[Byte] if b.size <= 16777215 => MEDIUM_BLOB
// No support for LONG_BLOBS. In order to implement this correctly
// in Java/Scala we need to represent this set of bytes as a composition
// of buffers.
// case b: Array[Byte] if b.size <= 4294967295L => LONG_BLOB
// Date and Time
case t: java.sql.Timestamp => TIMESTAMP
case d: java.sql.Date => DATE
case d: java.util.Date => DATETIME
case _ => -1
}
}
/**
* Timestamp object that can appropriately
* represent MySQL zero Timestamp.
*/
case object SQLZeroTimestamp extends java.sql.Timestamp(0) {
override val getTime = 0L
override val toString = "0000-00-00 00:00:00"
}
/**
* Date object that can appropriately
* represent MySQL zero Date.
*/
case object SQLZeroDate extends java.sql.Date(0) {
override val getTime = 0L
override val toString = "0000-00-00"
}
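// Illustrative sketch (not part of the original file): how a value maps to its MySQL binary type
// code and on-the-wire size when encoding prepared-statement parameters.
//
//   Type.getCode("hello")                 // VARCHAR (0x0f)
//   Type.getCode(42)                      // LONG (0x03)
//   Type.getCode(new Array[Byte](1000))   // BLOB (0xfc), since 255 < size <= 65535
//   Type.sizeOf(42)                       // 4 bytes
//   Type.sizeOf("hi")                     // length-prefix bytes + 2 payload bytes
//   Type.sizeOf(new Object)               // -1, i.e. unknown mapping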
| foursquare/finagle | finagle-mysql/src/main/scala/com/twitter/finagle/mysql/protocol/Type.scala | Scala | apache-2.0 | 2,960 |
package foo
// Note: Using traits to get distinct errors
// (instead of sharing one single "')' expected but '}' found." at the end)
//// Multi-line only cases: make sure trailing commas are only supported when multi-line
trait ArgumentExprs1 { f(23, "bar", )(Ev0, Ev1) }
trait ArgumentExprs2 { f(23, "bar")(Ev0, Ev1, ) }
trait ArgumentExprs3 { new C(23, "bar", )(Ev0, Ev1) }
trait ArgumentExprs4 { new C(23, "bar")(Ev0, Ev1, ) }
trait Params1 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
trait Params2 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
trait ClassParams1 { final class C(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1) }
trait ClassParams2 { final class C(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1, ) }
trait SimpleExpr { (23, "bar", ) }
trait TypeArgs { def f: C[Int, String, ] }
trait TypeParamClause { type C[A, B, ] }
trait FunTypeParamClause { def f[A, B, ] }
trait SimpleType { def f: (Int, String, ) }
trait FunctionArgTypes { def f: (Int, String, ) => Boolean }
trait SimplePattern { val (foo, bar, ) = null: Any }
trait ImportSelectors { import foo.{ Ev0, Ev1, } }
trait Import { import foo.Ev0, foo.Ev1, }
trait ValDcl { val foo, bar, = 23 }
trait VarDcl { var foo, bar, = 23 }
trait VarDef { var foo, bar, = _ }
trait PatDef { val Foo(foo), Bar(bar), = bippy }
//// The Tuple 1 cases
// the Tuple1 value case: make sure that the possible "(23, )" syntax for Tuple1 doesn't compile to "23"
trait SimpleExpr2 { (23, ) }
// the Tuple1 type case: make sure that the possible "(Int, )" syntax for Tuple1[Int] doesn't compile to "Int"
trait SimpleType2 { def f: (Int, ) }
//// Test utilities
object `package` {
sealed trait Ev0; implicit object Ev0 extends Ev0
sealed trait Ev1; implicit object Ev1 extends Ev1
}
| scala/scala | test/files/neg/trailing-commas.scala | Scala | apache-2.0 | 1,820 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import org.scalatest.{Matchers, FunSpecLike}
import org.squbs.unicomplex.UnicomplexBoot._
import com.typesafe.config.{ConfigException, ConfigFactory}
import java.io.{File, PrintWriter}
import scala.util.Try
class UnicomplexBootSpec extends FunSpecLike with Matchers {
describe ("The UnicomplexBootstrap") {
it ("Should handle non-duplication in cube short names") {
val cubeList = Seq(
CubeInit(Cube("bar", "com.foo.foobar.bar", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("foo", "com.foo.baz.foo", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("foobar", "com.foo.baz.foobar", "1.0.0", "don't care"), Map.empty)
)
val newList = resolveAliasConflicts(cubeList)
newList should be theSameInstanceAs cubeList
}
it ("Should handle duplication in cube short names") {
val cubeList = Seq(
CubeInit(Cube("bar", "com.foo.foobar.bar", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("bar", "com.foo.baz.bar", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("bar", "com.foo.bar.bar", "1.0.0", "don't care"), Map.empty)
)
val newList = resolveAliasConflicts(cubeList)
newList should not be theSameInstanceAs (cubeList)
val newAliases = newList map (_.info.name)
val refAliases = Seq("foobar.bar", "baz.bar", "bar.bar")
newAliases should be (refAliases)
}
it ("Should handle some duplication in cube names") {
val cubeList = Seq(
CubeInit(Cube("bar", "com.bar.baz.bar", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("bar", "com.foo.baz.bar", "1.0.0", "don't care"), Map.empty),
CubeInit(Cube("bar", "com.foo.bar.bar", "1.0.0", "don't care"), Map.empty)
)
val newList = resolveAliasConflicts(cubeList)
newList should not be theSameInstanceAs (cubeList)
val newAliases = newList map (_.info.name)
val refAliases = Seq("bar.baz.bar", "foo.baz.bar", "bar.bar")
newAliases should be (refAliases)
}
it ("Should load addOnConfig if provided") {
import scala.collection.JavaConversions._
val addOnConfig = ConfigFactory.parseMap(Map(
"squbs.testAttribute" -> "foobar"
))
val config = getFullConfig(Some(addOnConfig))
config.getString("squbs.actorsystem-name") should be ("squbs")
config.getString("squbs.testAttribute") should be ("foobar")
}
it ("Should load config files of all supported formats from external config dir") {
val configDir = new File("squbsconfig")
val createdConfDir = configDir.mkdir()
{ // 1. Deal with no config file
val config = getFullConfig(None)
config.getString("squbs.actorsystem-name") should be ("squbs")
an [ConfigException.Missing] should be thrownBy config.getString("squbs.testAttribute1")
}
{ // 2. Deal with .conf
val appConf =
"""
|squbs {
| testAttribute1 = foobar1
|}
""".stripMargin
val confFile = new File(configDir, "application.conf")
val writer = new PrintWriter(confFile)
writer.append(appConf)
writer.close()
val config = getFullConfig(None)
config.getString("squbs.actorsystem-name") should be ("squbs")
config.getString("squbs.testAttribute1") should be ("foobar1")
confFile.delete()
}
{ // 3. Deal with .json
val appConf =
"""
|{
| "squbs" : {
| "testAttribute2" : "foobar2"
| }
|}
""".stripMargin
val confFile = new File(configDir, "application.json")
val writer = new PrintWriter(confFile)
writer.append(appConf)
writer.close()
val config = getFullConfig(None)
config.getString("squbs.actorsystem-name") should be ("squbs")
config.getString("squbs.testAttribute2") should be ("foobar2")
confFile.delete()
}
{ // 4. Deal with .properties
val appConf =
"""
|squbs.testAttribute3=foobar3
""".stripMargin
val confFile = new File(configDir, "application.properties")
val writer = new PrintWriter(confFile)
writer.append(appConf)
writer.close()
val config = getFullConfig(None)
config.getString("squbs.actorsystem-name")should be ("squbs")
config.getString("squbs.testAttribute3")should be ("foobar3")
confFile.delete()
}
if (createdConfDir) configDir.deleteOnExit()
}
it ("Should find the configured listeners and their configurations") {
val appConf =
"""
|default-listener {
| type = squbs.listener
| aliases = [ foo-listener, bar-listener ]
| bind-address = "0.0.0.0"
| bind-port = 8080
| secure = false
| full-address = false
|}
|
|secure-listener {
| type = squbs.listener
| aliases = [ foobar-listener, baz-listener ]
| bind-address = "0.0.0.0"
| bind-port = 8443
| secure = true
| full-address = false
| ssl-context = "org.my.SSLContext"
|}
|
|blocking-dispatcher {
| # Dispatcher is the name of the event-based dispatcher
| type = Dispatcher
| # What kind of ExecutionService to use
| executor = "fork-join-executor"
|}
|
|some-config {
| foo = bar
|}
|
|some-other-config = foo
""".stripMargin
val config = ConfigFactory.parseString(appConf)
val listeners = configuredListeners(config)
listeners.size should be (2)
listeners map (_._1) should contain only ("default-listener", "secure-listener")
listeners.toMap.apply("secure-listener").getInt("bind-port") should be (8443)
}
it ("Should find the active and missing listeners") {
val routeDef1 =
"""
| class-name = org.minime.Svc1
| listeners = [
| secure-listener
| ]
""".stripMargin
val route1 = ConfigFactory.parseString(routeDef1)
val routeDef2 =
"""
| class-name = org.minime.Svc2
| listeners = [
| secure2-listener
| ]
""".stripMargin
val route2 = ConfigFactory.parseString(routeDef2)
val routeDef3 =
"""
| class-name = org.minime.Svc3
| listeners = [
| local-listener
| ]
""".stripMargin
val route3 = ConfigFactory.parseString(routeDef3)
val appConfDef =
"""
|default-listener {
| type = squbs.listener
| aliases = [ foo-listener, bar-listener ]
| bind-address = "0.0.0.0"
| bind-port = 8080
| secure = false
| full-address = false
|}
|
|secure-listener {
| type = squbs.listener
| aliases = [ secure2-listener, baz-listener ]
| bind-address = "0.0.0.0"
| bind-port = 8443
| secure = true
| full-address = false
| ssl-context = "org.my.SSLContext"
|}
""".stripMargin
val appConf = ConfigFactory.parseString(appConfDef)
val cubeList = Seq(
CubeInit(Cube("foo", "com.foo.bar", "1.0.0", "don't care"), Map(StartupType.SERVICES -> Seq(route1))),
CubeInit(Cube("bar", "com.foo.bar", "1.0.0", "don't care"), Map(StartupType.SERVICES -> Seq(route2, route3))))
val (activeAliases, activeListeners, missingListeners) = findListeners(appConf, cubeList)
activeAliases map (_._1) should contain only ("secure-listener", "secure2-listener")
activeListeners map (_._1) should contain only "secure-listener"
missingListeners should contain only "local-listener"
}
it ("should merge the addOnConfig with original config") {
import scala.collection.JavaConversions._
val addOnConfig = ConfigFactory.parseMap(Map(
"configTest" -> Boolean.box(true)
))
val finalConfig = UnicomplexBoot.getFullConfig(Some(addOnConfig))
Try(finalConfig.getConfig("squbs")).toOption should not be (None)
finalConfig.getBoolean("configTest") should be (true)
}
}
}
| keshin/squbs | squbs-unicomplex/src/test/scala/org/squbs/unicomplex/UnicomplexBootSpec.scala | Scala | apache-2.0 | 9,346 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.oap.index
import scala.collection.mutable
import scala.util.hashing.{MurmurHash3 => MH3}
/**
* Implementation for Bloom filter.
*/
class BloomFilter(
maxBitCount: Int,
numOfHashFunc: Int)(var bloomBitSet: mutable.BitSet = null) {
private val hashFunctions: Array[BloomHashFunction] =
BloomHashFunction.getMurmurHashFunction(maxBitCount, numOfHashFunc)
if (bloomBitSet == null) {
bloomBitSet = new mutable.BitSet(maxBitCount)
}
def this() = this(1 << 16, 3)()
def getBitMapLongArray: Array[Long] = bloomBitSet.toBitMask
def getNumOfHashFunc: Int = numOfHashFunc
private def getIndices(value: String): Array[Int] =
hashFunctions.map(func => func.hash(value))
private def getIndices(value: Array[Byte]): Array[Int] =
hashFunctions.map(func => func.hash(value))
def addValue(value: String): Unit = {
val indices = getIndices(value)
indices.foreach(bloomBitSet.add)
}
def checkExist(value: String): Boolean = {
getIndices(value).forall(bloomBitSet.contains)
}
def addValue(data: Array[Byte]): Unit = {
val indices = getIndices(data)
indices.foreach(bloomBitSet.add)
}
def checkExist(data: Array[Byte]): Boolean = {
val indices = getIndices(data)
for (i <- indices)
if (!bloomBitSet.contains(i)) {
return false
}
true
}
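  /**
   * Recursively builds the proof tree for a triple pattern: each rule whose head subsumes the
   * pattern is instantiated and attached as an Or-node, its body patterns become new And-nodes,
   * and rules already visited on the current branch are skipped to avoid infinite recursion.
   */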
}
object BloomFilter {
def apply(longArr: Array[Long], numOfHashFunc: Int): BloomFilter =
new BloomFilter(longArr.length * 64, numOfHashFunc)(mutable.BitSet.fromBitMask(longArr))
}
private[oap] trait BloomHashFunction {
def hash(value: String): Int
def hash(value: Array[Byte]): Int
}
private[oap] class MurmurHashFunction(maxCount: Int, seed: Int) extends BloomHashFunction {
def hash(value: String): Int =
(MH3.stringHash(value, seed) % maxCount + maxCount) % maxCount
override def hash(value: Array[Byte]): Int =
(MH3.bytesHash(value, seed) % maxCount + maxCount) % maxCount
}
private[oap] object BloomHashFunction {
def getMurmurHashFunction(maxCount: Int, cnt: Int): Array[BloomHashFunction] = {
(0 until cnt).map(i => new MurmurHashFunction(maxCount, i.toString.hashCode())).toArray
}
}
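// Illustrative usage sketch (not part of the original file): a filter with the default 2^16 bits
// and 3 Murmur3 hash functions, round-tripped through its long-array representation.
//
//   val bf = new BloomFilter()
//   bf.addValue("oap")
//   bf.checkExist("oap")         // true
//   bf.checkExist("missing")     // false with high probability (false positives are possible)
//   val restored = BloomFilter(bf.getBitMapLongArray, bf.getNumOfHashFunc)
//   restored.checkExist("oap")   // still true, since the bit set and hash seeds are preserved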
| Intel-bigdata/OAP | oap-cache/oap/src/main/scala/org/apache/spark/sql/execution/datasources/oap/index/BloomFilter.scala | Scala | apache-2.0 | 3,014 |
/*
* Copyright (C) 2013-2015 by Michael Hombre Brinkmann
*/
package net.twibs.web
import com.google.common.cache.{CacheLoader, CacheBuilder, LoadingCache}
import java.util.concurrent.TimeUnit
import net.twibs.util.{ResponseRequest, Request}
import scala.concurrent.duration._
class ExpiringCacheResponder(delegate: Responder, duration: Duration = 1 second) extends CacheResponder {
def respond(request: Request): Option[Response] =
request.use {
val requestCacheKey = request.responseRequest
if (!request.useCache) {
cache.invalidate(requestCacheKey)
}
respond(requestCacheKey)
}
protected val cache: LoadingCache[ResponseRequest, Option[Response]] =
CacheBuilder.newBuilder().expireAfterWrite(duration.toMillis, TimeUnit.MILLISECONDS).build(loader)
private def loader = new CacheLoader[ResponseRequest, Option[Response]]() {
def load(requestCacheKey: ResponseRequest): Option[Response] = delegate.respond(Request)
}
}
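// Illustrative usage sketch (not part of the original file): wrapping a slow responder so that
// identical ResponseRequest keys are served from the Guava cache for one second; `slowResponder`
// and `request` are placeholders supplied by the caller.
//
//   val cached = new ExpiringCacheResponder(slowResponder, 1 second)
//   cached.respond(request)   // first call delegates; repeated calls within the TTL hit the cache
//                             // unless request.useCache is false, which invalidates the entry first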
| hombre/twibs | twibs-web/src/main/scala/net/twibs/web/ExpiringCacheResponder.scala | Scala | apache-2.0 | 983 |
package com.mesosphere.cosmos
import com.mesosphere.cosmos.error.CosmosException
import com.mesosphere.cosmos.error.EndpointUriConnection
import com.mesosphere.cosmos.error.EndpointUriSyntax
import com.mesosphere.cosmos.error.GenericHttpError
import com.mesosphere.cosmos.error.UnsupportedContentEncoding
import com.mesosphere.cosmos.error.UnsupportedRedirect
import com.mesosphere.http.MediaType
import com.mesosphere.http.MediaTypeParser
import io.lemonlabs.uri.Uri
import com.twitter.finagle.http.Fields
import com.twitter.finagle.http.filter.LogFormatter
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.util.Future
import io.netty.handler.codec.http.HttpResponseStatus
import java.io.IOException
import java.io.InputStream
import java.net.HttpURLConnection
import java.net.MalformedURLException
import java.net.URISyntaxException
import java.util.zip.GZIPInputStream
import org.slf4j.Logger
import scala.util.{Failure, Success, Try}
object HttpClient {
lazy val DEFAULT_RETRIES = maxRetryCount()
lazy val RETRY_INTERVAL = retryDuration()
val logger: Logger = org.slf4j.LoggerFactory.getLogger(getClass)
def fetch[A](
uri: Uri,
headers: (String, String)*
)(
processResponse: ResponseData => A
)(
implicit statsReceiver: StatsReceiver
): Future[A] = {
fetchResponse(uri, DEFAULT_RETRIES, headers: _*)
.flatMap { case (responseData, conn) =>
Future(processResponse(responseData))
// IOException when closing stream should not fail the whole request, that's why we're ignoring exceptions with Try
.ensure({
Try(responseData.contentStream.close())
()
})
.ensure(conn.disconnect())
}
.handle { case e: IOException =>
throw CosmosException(EndpointUriConnection(uri, e.getMessage), e)
}
}
def fetchStream[A](
uri: Uri,
headers: (String, String)*
)(
processResponse: (ResponseData, HttpURLConnection) => A
)(
implicit statsReceiver: StatsReceiver
): Future[A] = {
fetchResponse(uri, DEFAULT_RETRIES, headers: _*)
.map { case (responseData, conn) => processResponse(responseData, conn) }
.handle { case e: IOException =>
throw CosmosException(EndpointUriConnection(uri, e.getMessage), e)
}
}
private[this] def fetchResponse(
uri: Uri,
retryCount : Int,
headers: (String, String)*
)(
implicit statsReceiver: StatsReceiver
): Future[(ResponseData, HttpURLConnection)] = {
val isRetryApplicable = (ex: Exception) => ex match {
case ce: CosmosException => ce.error.isInstanceOf[GenericHttpError] &&
ce.error.asInstanceOf[GenericHttpError].clientStatus.code() >= 500
case _: IOException => true
}
Future(uri.toJavaURI.toURL.openConnection())
.handle {
case t @ (_: IllegalArgumentException | _: MalformedURLException | _: URISyntaxException) =>
throw CosmosException(EndpointUriSyntax(uri, t.getMessage), t)
}
.map { case conn: HttpURLConnection =>
conn.setRequestProperty(Fields.UserAgent, s"cosmos/${BuildProperties().cosmosVersion}")
// UserAgent set above can be overridden below.
headers.foreach { case (name, value) => conn.setRequestProperty(name, value) }
logger.info(format(conn))
val responseData = extractResponseData(uri, conn)
(responseData, conn)
}
.rescue {
case ex: Exception if isRetryApplicable(ex) =>
if (retryCount > 0) {
logger.info(s"Retry [remaining - $retryCount] : ${ex.getMessage}")
Future.sleep(RETRY_INTERVAL).before(fetchResponse(uri, retryCount - 1, headers: _*))
} else {
logger.warn(s"Retries exhausted, giving up due to ${ex.getMessage}", ex)
throw ex
}
}
}
private[this] def extractResponseData(
uri: Uri,
conn: HttpURLConnection
)(implicit
sr: StatsReceiver
): ResponseData = {
val (contentType, contentEncoding) = parseContentHeaders(uri, conn)
val contentLength = conn.getContentLengthLong match {
case len if len < 0 => None
case len => Some(len)
}
val contentStream = prepareContentStream(conn, contentEncoding)
ResponseData(contentType, contentLength, contentStream)
}
private def parseContentHeaders(
uri: Uri,
conn: HttpURLConnection
)(implicit
sr: StatsReceiver
): (MediaType, Option[String]) = {
conn.getResponseCode match {
case HttpURLConnection.HTTP_OK =>
sr.scope("status").counter("200").incr()
val contentEncoding = Option(conn.getHeaderField(Fields.ContentEncoding))
MediaTypeParser.parse(conn.getHeaderField(Fields.ContentType)) match {
case Success(contentType) => (contentType, contentEncoding)
case Failure(error) => {
logger.error(s"Error while parsing the Content-Type " +
s"${conn.getHeaderField(Fields.ContentType)} from URI $uri",
error
)
throw error
}
}
case status if RedirectStatuses(status) =>
sr.scope("status").counter(status.toString).incr()
// Different forms of redirect, HttpURLConnection won't follow a redirect across schemes
val loc = Option(conn.getHeaderField("Location")).map(Uri.parse).flatMap(_.schemeOption)
throw UnsupportedRedirect(List(uri.schemeOption.get), loc).exception
case status =>
sr.scope("status").counter(status.toString).incr()
throw GenericHttpError(uri = uri, clientStatus = HttpResponseStatus.valueOf(status)).exception
}
}
private def prepareContentStream(
conn: HttpURLConnection,
contentEncoding: Option[String]
)(implicit
sr: StatsReceiver
): InputStream = {
contentEncoding match {
case Some("gzip") =>
sr.scope("contentEncoding").counter("gzip").incr()
new GZIPInputStream(conn.getInputStream)
case ce @ Some(_) =>
throw UnsupportedContentEncoding(List("gzip"), ce).exception
case _ =>
sr.scope("contentEncoding").counter("plain").incr()
conn.getInputStream
}
}
def format(conn: HttpURLConnection): String = {
val contentLength = conn.getContentLength
val contentLengthStr = if (contentLength > 0) s"${contentLength.toString}B" else "-"
val userAgent:Option[String] = Option(conn.getHeaderField(Fields.UserAgent))
def escape(s: String) = LogFormatter.escape(s)
s"${conn.getURL.getHost} - " +
s"${escape("\\"")}${escape(conn.getRequestMethod)} " +
s"${escape(conn.getURL.toURI.toString)} " +
s"${escape(conn.getURL.getProtocol)}${escape("\\"")} " +
s"${conn.getResponseCode} " +
s"$contentLengthStr" +
s"${userAgent match {
case Some(uaStr) => s" ${escape("\\"")}${escape(uaStr)}${escape("\\"")}"
case None => " -"
}}"
}
final case class ResponseData(
contentType: MediaType,
contentLength: Option[Long],
contentStream: InputStream
)
val TemporaryRedirect: Int = 307
val PermanentRedirect: Int = 308
val RedirectStatuses: Set[Int] = {
Set(
HttpURLConnection.HTTP_MOVED_PERM,
HttpURLConnection.HTTP_MOVED_TEMP,
HttpURLConnection.HTTP_SEE_OTHER,
TemporaryRedirect,
PermanentRedirect
)
}
}
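// Illustrative usage sketch (not part of the original file): fetching a document and materializing
// its (already gzip-decoded) content stream; `uri` and the StatsReceiver are supplied by the caller.
//
//   implicit val sr: StatsReceiver = ...   // e.g. the server's stats receiver
//   val body: Future[String] =
//     HttpClient.fetch(uri, Fields.Accept -> "application/json") { responseData =>
//       scala.io.Source.fromInputStream(responseData.contentStream).mkString
//     }
//
// fetch takes care of closing the stream and disconnecting the underlying HttpURLConnection.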
| dcos/cosmos | cosmos-server/src/main/scala/com/mesosphere/cosmos/HttpClient.scala | Scala | apache-2.0 | 7,352 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.compiler
import org.junit.Test
import org.junit.Assert._
class UnitJSTest {
@Test def `should_have_toString()`(): Unit = {
assertEquals(().toString(), "undefined")
assertEquals(((): Any).toString(), "undefined")
}
}
| mdedetrich/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/compiler/UnitJSTest.scala | Scala | bsd-3-clause | 787 |
package io.mediachain.translation
import com.fasterxml.jackson.core.JsonFactory
import io.mediachain.XorMatchers
import org.specs2.Specification
import org.json4s.{JObject, JValue, JArray, JInt}
object JsonLoaderSpec extends Specification with XorMatchers {
def is =
s2"""
Loads json from a URL into an AST $loadsFromURL
Parses simple $parsesSimple
Parses key after object $parsesAfterObject
Parses tate $parsesTate
Parses objects in an array $parsesObjectInArray
Error on malformed primitive $simpleError
"""
val jf = new JsonFactory
def loadsFromURL = {
val json = JsonLoader.loadObjectFromURL(SpecResources.simpleTestResourceUrl)
json.toEither must beRight
}
val simple =
"""
{
"a": 1
}
""".stripMargin
def parsesSimple = {
val parser = jf.createParser(simple)
parser.nextToken()
val parsed = JsonLoader.parseJOBject(parser)
parsed must beRightXor
}
val afterObject =
"""
{
"a": {
"b": "d"
},
"c": "o"
}
""".stripMargin
def parsesAfterObject = {
val parser = jf.createParser(afterObject)
parser.nextToken()
val parsed = JsonLoader.parseJOBject(parser)
parsed must beRightXor
}
def tate =
"""
{
"acno": "D12803",
"acquisitionYear": 1856,
"additionalImages": [
{
"copyright": null,
"creativeCommons": null,
"filenameBase": "D12803",
"sizes": [
{
"caption": "Enhanced image",
"cleared": true,
"file": "enhanced_images/D128/D12803_E.jpg",
"height": 358,
"resolution": 512,
"size": "large",
"width": 512
}
]
}
],
"all_artists": "Joseph Mallord William Turner",
"catalogueGroup": {
"accessionRanges": "D12702-D12883; D40663-D40665",
"completeStatus": "COMPLETE",
"finbergNumber": "CLX",
"groupType": "Turner Sketchbook",
"id": 65802,
"shortTitle": "Waterloo and Rhine Sketchbook"
},
"classification": "on paper, unique",
"contributorCount": 1,
"contributors": [
{
"birthYear": 1775,
"date": "1775\\u20131851",
"displayOrder": 1,
"fc": "Joseph Mallord William Turner",
"gender": "Male",
"id": 558,
"mda": "Turner, Joseph Mallord William",
"role": "artist",
"startLetter": "T"
}
],
"creditLine": "Accepted by the nation as part of the Turner Bequest 1856",
"dateRange": {
"endYear": 1817,
"startYear": 1817,
"text": "1817"
},
"dateText": "1817",
"depth": "",
"dimensions": "support: 150 x 94 mm",
"finberg": "CLX 53",
"foreignTitle": null,
"groupTitle": "Waterloo and Rhine Sketchbook",
"height": "94",
"id": 40171,
"inscription": null,
"medium": "Graphite on paper",
"movementCount": 0,
"pageNumber": 109,
"subjectCount": 8,
"subjects": {
"children": [
{
"children": [
{
"children": [
{
"id": 9067,
"name": "Coblenz, Ehrenbreitstein"
}
],
"id": 107,
"name": "cities, towns, villages (non-UK)"
},
{
"children": [
{
"id": 3561,
"name": "Germany"
}
],
"id": 108,
"name": "countries and continents"
}
],
"id": 106,
"name": "places"
},
{
"children": [
{
"children": [
{
"id": 1138,
"name": "castle"
}
],
"id": 20,
"name": "military"
},
{
"children": [
{
"id": 465,
"name": "church"
}
],
"id": 25,
"name": "religious"
},
{
"children": [
{
"id": 1151,
"name": "dome"
},
{
"id": 1065,
"name": "tower"
}
],
"id": 17,
"name": "features"
}
],
"id": 13,
"name": "architecture"
},
{
"children": [
{
"children": [
{
"id": 880,
"name": "mountain"
},
{
"id": 563,
"name": "rocky"
}
],
"id": 71,
"name": "landscape"
}
],
"id": 60,
"name": "nature"
}
],
"id": 1,
"name": "subject"
},
"thumbnailCopyright": null,
"thumbnailUrl": "http://www.tate.org.uk/art/images/work/D/D12/D12803_8.jpg",
"title": "The Fortress of Ehrenbreitstein, from the South, next to the Church of the Holy Cross and the Heribertturm",
"units": "mm",
"url": "http://www.tate.org.uk/art/artworks/turner-the-fortress-of-ehrenbreitstein-from-the-south-next-to-the-church-of-the-holy-cross-d12803",
"width": "150"
}
"""
def parsesTate = {
val parser = jf.createParser(tate)
parser.nextToken()
val parsed = JsonLoader.parseJOBject(parser)
parsed must beRightXor { json =>
(json \\ "contributors") match {
case JArray(List(obj)) =>
(obj \\ "birthYear") must_== JInt(1775)
case _ => failure
}
}
}
val objectInArray =
"""
|{
| "a":[
| {"b":1,"c":2}
| ],
| "f": "foo"
|}
""".stripMargin
def parsesObjectInArray = {
val parser = jf.createParser(objectInArray)
parser.nextToken()
val parsed = JsonLoader.parseJValue(parser)
parsed must beRightXor
}
val simpleMalformed =
"""
| {
| "a":
| }
""".stripMargin
def simpleError = {
val parserXor = JsonLoader.createParser(simpleMalformed)
val parsed = for {
parser <- parserXor
parsed <- JsonLoader.parseJValue(parser)
} yield parsed
parsed must beLeftXor
}
}
| mediachain/L-SPACE | translation_engine/src/test/scala/io/mediachain/translation/JsonLoaderSpec.scala | Scala | mit | 5,668 |
package de.johoop.xplane.samples.livemap
import akka.actor.ActorSystem
import akka.stream.{KillSwitches, Materializer}
import akka.stream.scaladsl.{Keep, Sink}
import de.johoop.xplane.samples.livemap.util._
import de.johoop.xplane.util.returning
import de.johoop.xplane.samples.livemap.model.LiveMap
import de.johoop.xplane.samples.livemap.view.{LiveMapView, MapPane}
import scalafx.Includes._
import scalafx.application.Platform
class Controller(mapPane: MapPane, liveMapView: LiveMapView)(implicit system: ActorSystem, mat: Materializer) {
private var liveMap = LiveMap(killSwitch = None)
def wire: Unit = {
liveMapView.map.zoomProperty.onChange {
liveMapView.zoom.text = "Zoom Level: " + liveMapView.map.getZoom.toString
}
liveMapView.map.mapTypeProperty.onChange {
liveMapView.mapType.text = "Map Type: " + liveMapView.map.getMapType.name
}
mapPane.connect.onAction = { _ =>
liveMap.killSwitch match {
case None =>
mapPane.connect.text = "Disconnect"
liveMap = liveMap.copy(killSwitch = Some(LiveMap.initialize
.viaMat(KillSwitches.single)(Keep.right)
.scan(Track()) { (track, rpos) =>
returning(Track.update(track, rpos)) { track =>
Platform.runLater(liveMapView.update(rpos, track.lines.lastOption))
}
}
.to(Sink.ignore)
.run()
))
case Some(killSwitch) =>
mapPane.connect.text = "Connect"
killSwitch.shutdown()
liveMap = liveMap.copy(killSwitch = None)
}
}
}
def shutdown: Unit = liveMap.killSwitch foreach { _.shutdown() }
}
| jmhofer/xplane-udp | samples/src/main/scala/de/johoop/xplane/samples/livemap/Controller.scala | Scala | gpl-3.0 | 1,676 |
package scala.collection
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.tools.testkit.AssertUtil.assertSameElements
class ViewTest {
@Test
def hasCorrectDropAndTakeMethods(): Unit = {
val iter = Iterable(1, 2, 3)
assertEquals(Iterable.empty[Int], iter.view take Int.MinValue to Iterable)
assertEquals(Iterable.empty[Int],
iter.view takeRight Int.MinValue to Iterable)
assertEquals(iter, iter.view drop Int.MinValue to Iterable)
assertEquals(iter, iter.view dropRight Int.MinValue to Iterable)
}
@Test
def seqView(): Unit = {
val xs = List(1, 2, 3)
assertEquals(xs.reverse, xs.view.reverse.toSeq)
assertEquals(2, xs.view(1))
val v1 = xs.view.reverse.reverse
val v1t: SeqView[Int] = v1
assertEquals(xs, v1t.toSeq)
val v2 = xs.view.concat(xs)
val v2t: SeqView[Int] = v2
assertEquals(xs.concat(xs), v2t.toSeq)
}
@Test
def indexedSeqView(): Unit = {
val xs = Vector(1, 2, 3)
assertEquals(xs.reverse, xs.view.reverse.toSeq)
assertEquals(2, xs.view(1))
val v1 = xs.view.reverse.reverse
val v1t: IndexedSeqView[Int] = v1
assertEquals(xs, v1t.toSeq)
val v2 = xs.view.concat(xs)
val v2t: IndexedSeqView[Int] = v2
assertEquals(xs.concat(xs), v2t.toSeq)
}
@Test
def mapView(): Unit = {
val xs = immutable.Map(1 -> "a", 2 -> "b")
assertEquals("a", xs.view(1))
val ys = xs.view.mapValues(_.toUpperCase)
assertTrue(ys.contains(1))
assertEquals("B", ys(2))
}
@Test
def viewsViewIsNoOp(): Unit = {
def check[A](it: Iterable[A]): Unit = {
val view = it.view
assertTrue(view eq view.view)
}
check(immutable.Set(1, 2, 3)) // View
check(List(1, 2, 3)) // SeqView
check(immutable.Vector(1, 2, 3)) // IndexedSeqView
check(immutable.Map(1 -> "a", 2 -> "b")) // MapView
}
@Test
def tapEach(): Unit = {
val lb = ListBuffer[Int]()
val v =
View(1, 2, 3)
.tapEach(lb += _)
.map(_ => 10)
.tapEach(lb += _)
.tapEach(_ => lb += -1)
assertEquals(ListBuffer[Int](), lb)
val strict = v.to(Seq)
assertEquals(strict, Seq(10, 10, 10))
assertEquals(lb, Seq(1, 10, -1, 2, 10, -1, 3, 10, -1))
}
@Test
def updated(): Unit = {
def checkThrows[U](f: => U) = try { f; assertTrue(false) } catch { case _: IndexOutOfBoundsException => }
// View.Updated can update the last element but not the one after:
val v1 = new View.Updated(0 until 5, 4, 0)
val v2 = new View.Updated(0 until 5, 5, 0)
assertEquals(List(0,1,2,3,0), v1.toList)
checkThrows(v2.toList)
// Seq.updated throws immediately for strict collections:
checkThrows(ArrayBuffer.from(0 until 5).updated(5, 0))
checkThrows(ArrayBuffer.from(0 until 5).updated(-1, 0))
// Negative indices result in an immediate exception even for lazy collections:
checkThrows(LazyList.from(0 until 5).updated(-1, 0))
// `updated` does not force a LazyList but forcing it afterwards will check the index:
val ll = LazyList.from(0 until 5).updated(5, 0)
checkThrows(ll.toList)
}
@deprecated("Tests deprecated API", since="2.13")
@Test
def `t10103 result of view must be indexed seq`(): Unit = {
val ints: IndexedSeq[Int] = Vector(1, 2, 3, 4)
ints.view(1, 3): scala.collection.IndexedSeqView[Int]
}
@Test
def _toString(): Unit = {
assertEquals("View(<not computed>)", View(1, 2, 3).toString)
}
// see scala/scala#9388
@Test
def patch(): Unit = {
// test re-iterability
val v1 = List(2).view.patch(1, List(3, 4, 5).iterator, 0)
assertSameElements(Seq(2, 3, 4, 5), v1.toList)
assertSameElements(Seq(2, 3, 4, 5), v1.toList) // check that it works twice
// https://github.com/scala/scala/pull/9388#discussion_r709392221
val v2 = List(2).view.patch(1, Nil, 0)
assert(!v2.isEmpty)
// https://github.com/scala/scala/pull/9388#discussion_r709481748
val v3 = Nil.view.patch(0, List(1).iterator, 0)
assert(v3.knownSize != 0)
}
}
| scala/scala | test/junit/scala/collection/ViewTest.scala | Scala | apache-2.0 | 4,098 |
package simplez
import org.specs2.mutable._
import std.option._
import syntax._
class ApplicativeSpec extends Specification {
"An applicative builder" should {
"work for options (Some)" in {
(Option(3) |@| Option(4) |@| Option(5)) { _ + _ + _ } should beSome(12)
}
"work for options (None)" in {
(Option(3) |@| (None: Option[Int]) |@| Option(5)) { _ + _ + _ } should beNone
}
"work for mixed types" in {
def test(a: Int, b: Double): Double = a + b
(Option(3) |@| Option(4.0)) { test } should beSome(7.0)
}
}
}
| inoio/simplez | main/src/test/scala/simplez/ApplicativeSpec.scala | Scala | bsd-2-clause | 570 |
package com.github.vonnagy.service.container.security
import com.github.vonnagy.service.container.AkkaTestkitSpecs2Support
import org.specs2.mutable.SpecificationLike
class SSLContextProviderSpec extends AkkaTestkitSpecs2Support with SpecificationLike {
val sys = system
"SSLContextProvider" should {
"allow for the creation of a server context" in {
val prov = new SSLServerContextProvider {
implicit def system = sys
def configNamespace = "container.ssl"
}
prov.sslConfig must not beNull
prov.isClient must beFalse
val ctx = prov.sslContext
ctx must not beNull
}
"allow for the creation of a client context" in {
val prov = new SSLClientContextProvider {
implicit def system = sys
def configNamespace = "container.ssl"
}
prov.sslConfig must not beNull
prov.isClient must beTrue
val ctx = prov.sslContext
ctx must not beNull
}
}
}
| vonnagy/service-container | service-container/src/test/scala/com/github/vonnagy/service/container/security/SSLContextProviderSpec.scala | Scala | apache-2.0 | 971 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.internal
import java.util.Locale
import org.apache.spark.util.Utils
/**
* Static SQL configuration is a cross-session, immutable Spark configuration. External users can
* see the static sql configs via `SparkSession.conf`, but can NOT set/unset them.
*/
object StaticSQLConf {
import SQLConf.buildStaticConf
val WAREHOUSE_PATH = buildStaticConf("spark.sql.warehouse.dir")
.doc("The default location for managed databases and tables.")
.stringConf
.createWithDefault(Utils.resolveURI("spark-warehouse").toString)
val CATALOG_IMPLEMENTATION = buildStaticConf("spark.sql.catalogImplementation")
.internal()
.stringConf
.checkValues(Set("hive", "in-memory"))
.createWithDefault("in-memory")
val GLOBAL_TEMP_DATABASE = buildStaticConf("spark.sql.globalTempDatabase")
.internal()
.stringConf
.transform(_.toLowerCase(Locale.ROOT))
.createWithDefault("global_temp")
// This is used to control when we will split a schema's JSON string to multiple pieces
// in order to fit the JSON string in metastore's table property (by default, the value has
// a length restriction of 4000 characters, so do not use a value larger than 4000 as the default
// value of this property). We will split the JSON string of a schema when its length exceeds the
// threshold. Note that this conf is only read in HiveExternalCatalog which is cross-session,
// that's why this conf has to be a static SQL conf.
val SCHEMA_STRING_LENGTH_THRESHOLD =
buildStaticConf("spark.sql.sources.schemaStringLengthThreshold")
.doc("The maximum length allowed in a single cell when " +
"storing additional schema information in Hive's metastore.")
.internal()
.intConf
.createWithDefault(4000)
val FILESOURCE_TABLE_RELATION_CACHE_SIZE =
buildStaticConf("spark.sql.filesourceTableRelationCacheSize")
.internal()
.doc("The maximum size of the cache that maps qualified table names to table relation plans.")
.intConf
.checkValue(cacheSize => cacheSize >= 0, "The maximum size of the cache must not be negative")
.createWithDefault(1000)
val CODEGEN_CACHE_MAX_ENTRIES = buildStaticConf("spark.sql.codegen.cache.maxEntries")
.internal()
.doc("When nonzero, enable caching of generated classes for operators and expressions. " +
"All jobs share the cache that can use up to the specified number for generated classes.")
.intConf
.checkValue(maxEntries => maxEntries >= 0, "The maximum must not be negative")
.createWithDefault(100)
val CODEGEN_COMMENTS = buildStaticConf("spark.sql.codegen.comments")
.internal()
.doc("When true, put comment in the generated code. Since computing huge comments " +
"can be extremely expensive in certain cases, such as deeply-nested expressions which " +
"operate over inputs with wide schemas, default is false.")
.booleanConf
.createWithDefault(false)
// When enabling the debug, Spark SQL internal table properties are not filtered out; however,
// some related DDL commands (e.g., ANALYZE TABLE and CREATE TABLE LIKE) might not work properly.
val DEBUG_MODE = buildStaticConf("spark.sql.debug")
.internal()
.doc("Only used for internal debugging. Not all functions are supported when it is enabled.")
.booleanConf
.createWithDefault(false)
val HIVE_THRIFT_SERVER_SINGLESESSION =
buildStaticConf("spark.sql.hive.thriftServer.singleSession")
.doc("When set to true, Hive Thrift server is running in a single session mode. " +
"All the JDBC/ODBC connections share the temporary views, function registries, " +
"SQL configuration and the current database.")
.booleanConf
.createWithDefault(false)
val SPARK_SESSION_EXTENSIONS = buildStaticConf("spark.sql.extensions")
.doc("A comma-separated list of classes that implement " +
"Function1[SparkSessionExtensions, Unit] used to configure Spark Session extensions. The " +
"classes must have a no-args constructor. If multiple extensions are specified, they are " +
"applied in the specified order. For the case of rules and planner strategies, they are " +
"applied in the specified order. For the case of parsers, the last parser is used and each " +
"parser can delegate to its predecessor. For the case of function name conflicts, the last " +
"registered function name is used.")
.stringConf
.toSequence
.createOptional
val QUERY_EXECUTION_LISTENERS = buildStaticConf("spark.sql.queryExecutionListeners")
.doc("List of class names implementing QueryExecutionListener that will be automatically " +
"added to newly created sessions. The classes should have either a no-arg constructor, " +
"or a constructor that expects a SparkConf argument.")
.stringConf
.toSequence
.createOptional
val STREAMING_QUERY_LISTENERS = buildStaticConf("spark.sql.streaming.streamingQueryListeners")
.doc("List of class names implementing StreamingQueryListener that will be automatically " +
"added to newly created sessions. The classes should have either a no-arg constructor, " +
"or a constructor that expects a SparkConf argument.")
.stringConf
.toSequence
.createOptional
val UI_RETAINED_EXECUTIONS =
buildStaticConf("spark.sql.ui.retainedExecutions")
.doc("Number of executions to retain in the Spark UI.")
.intConf
.createWithDefault(1000)
val BROADCAST_EXCHANGE_MAX_THREAD_THRESHOLD =
buildStaticConf("spark.sql.broadcastExchange.maxThreadThreshold")
.internal()
.doc("The maximum degree of parallelism to fetch and broadcast the table. " +
"If we encounter memory issue like frequently full GC or OOM when broadcast table " +
"we can decrease this number in order to reduce memory usage. " +
"Notice the number should be carefully chosen since decreasing parallelism might " +
"cause longer waiting for other broadcasting. Also, increasing parallelism may " +
"cause memory problem.")
.intConf
.checkValue(thres => thres > 0 && thres <= 128, "The threshold must be in (0,128].")
.createWithDefault(128)
val SUBQUERY_MAX_THREAD_THRESHOLD =
buildStaticConf("spark.sql.subquery.maxThreadThreshold")
.internal()
.doc("The maximum degree of parallelism to execute the subquery.")
.intConf
.checkValue(thres => thres > 0 && thres <= 128, "The threshold must be in (0,128].")
.createWithDefault(16)
val SQL_EVENT_TRUNCATE_LENGTH = buildStaticConf("spark.sql.event.truncate.length")
.doc("Threshold of SQL length beyond which it will be truncated before adding to " +
"event. Defaults to no truncation. If set to 0, callsite will be logged instead.")
.intConf
.checkValue(_ >= 0, "Must be set greater or equal to zero")
.createWithDefault(Int.MaxValue)
val SQL_LEGACY_SESSION_INIT_WITH_DEFAULTS =
buildStaticConf("spark.sql.legacy.sessionInitWithConfigDefaults")
.doc("Flag to revert to legacy behavior where a cloned SparkSession receives SparkConf " +
"defaults, dropping any overrides in its parent SparkSession.")
.booleanConf
.createWithDefault(false)
val DEFAULT_URL_STREAM_HANDLER_FACTORY_ENABLED =
buildStaticConf("spark.sql.defaultUrlStreamHandlerFactory.enabled")
.doc(
"When true, register Hadoop's FsUrlStreamHandlerFactory to support " +
"ADD JAR against HDFS locations. " +
"It should be disabled when a different stream protocol handler should be registered " +
"to support a particular protocol type, or if Hadoop's FsUrlStreamHandlerFactory " +
"conflicts with other protocol types such as `http` or `https`. See also SPARK-25694 " +
"and HADOOP-14598.")
.internal()
.booleanConf
.createWithDefault(true)
val STREAMING_UI_ENABLED =
buildStaticConf("spark.sql.streaming.ui.enabled")
.doc("Whether to run the Structured Streaming Web UI for the Spark application when the " +
"Spark Web UI is enabled.")
.booleanConf
.createWithDefault(true)
val STREAMING_UI_RETAINED_PROGRESS_UPDATES =
buildStaticConf("spark.sql.streaming.ui.retainedProgressUpdates")
.doc("The number of progress updates to retain for a streaming query for Structured " +
"Streaming UI.")
.intConf
.createWithDefault(100)
val STREAMING_UI_RETAINED_QUERIES =
buildStaticConf("spark.sql.streaming.ui.retainedQueries")
.doc("The number of inactive queries to retain for Structured Streaming UI.")
.intConf
.createWithDefault(100)
}
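// Illustrative note (not part of the original file): since these entries are static, they have to
// be provided before the SparkSession exists, e.g. via `--conf` on spark-submit or on the builder;
// calling `spark.conf.set(...)` on them afterwards is rejected.
//
//   val spark = SparkSession.builder()
//     .config("spark.sql.warehouse.dir", "/data/warehouse")
//     .config("spark.sql.ui.retainedExecutions", "500")
//     .getOrCreate()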
| darionyaphet/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala | Scala | apache-2.0 | 9,534 |
package net.sansa_stack.inference.spark.backwardchaining
import net.sansa_stack.inference.rules.RuleSets
import net.sansa_stack.inference.rules.plan.SimpleSQLGenerator
import net.sansa_stack.inference.spark.backwardchaining.tree.{AndNode, OrNode}
import net.sansa_stack.inference.utils.RuleUtils._
import net.sansa_stack.inference.utils.{CollectionUtils, Logging, TripleUtils}
import org.apache.jena.graph.{Node, NodeFactory, Triple}
import org.apache.jena.rdf.model.Resource
import org.apache.jena.reasoner.TriplePattern
import org.apache.jena.reasoner.rulesys.Rule
import org.apache.jena.reasoner.rulesys.impl.BindingVector
import org.apache.jena.sparql.util.FmtUtils
import org.apache.jena.vocabulary.{RDF, RDFS}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import net.sansa_stack.inference.spark.data.loader.RDFGraphLoader
// case class RDFTriple(s: Node, p: Node, o: Node)
case class RDFTriple(s: String, p: String, o: String)
/**
* @author Lorenz Buehmann
*/
class BackwardChainingReasonerDataframe(
val session: SparkSession,
val rules: Set[Rule],
val graph: Dataset[RDFTriple]) extends Logging {
import org.apache.spark.sql.functions._
private implicit def resourceToNodeConverter(resource: Resource): Node = resource.asNode()
val precomputeSchema: Boolean = true
lazy val schema: Map[Node, Dataset[RDFTriple]] = if (precomputeSchema) extractWithIndex(graph) else Map()
def isEntailed(triple: Triple): Boolean = {
isEntailed(new TriplePattern(triple))
}
def isEntailed(tp: TriplePattern): Boolean = {
val tree = buildTree(new AndNode(tp), Seq())
log.info(tree.toString)
val triples = processTree(tree)
triples.explain(true)
log.info(triples.distinct().count().toString)
false
}
private def processTree(tree: AndNode): Dataset[RDFTriple] = {
// 1. look for asserted triples in the graph
    var assertedTriples = lookup(tree.element)
    if(TripleUtils.isTerminological(tree.element.asTriple())) assertedTriples = broadcast(assertedTriples)
// 2. process the inference rules that can infer the triple pattern
val inferredTriples = tree.children.map(child => {
log.info(s"processing rule ${child.element}")
// first process the children, i.e. we get the data for each triple pattern in the body of the rule
val childrenTriples: Seq[Dataset[RDFTriple]] = child.children.map(processTree(_))
val baseTriples = if (childrenTriples.size > 1) childrenTriples.reduce(_ union _) else childrenTriples.head
// then apply the rule on the UNION of the children data
applyRule(child.element, baseTriples)
})
var triples = assertedTriples
if(inferredTriples.nonEmpty) triples = triples.union(inferredTriples.reduce(_ union _))
triples
}
private def lookup(tp: TriplePattern): Dataset[RDFTriple] = {
lookup(tp.asTriple())
}
private def lookupSimple(tp: Triple, triples: Dataset[RDFTriple] = graph): Dataset[RDFTriple] = {
info(s"Lookup data for $tp")
val s = tp.getSubject.toString()
val p = tp.getPredicate.toString()
val o = tp.getObject.toString()
var filteredGraph = triples
    if(tp.getSubject.isConcrete) {
      filteredGraph = filteredGraph.filter(t => t.s.equals(s))
    }
if(tp.getPredicate.isConcrete) {
filteredGraph = filteredGraph.filter(t => t.p.equals(p))
}
if(tp.getObject.isConcrete) {
filteredGraph = filteredGraph.filter(t => t.o.equals(o))
}
filteredGraph
}
private def lookup(tp: Triple): Dataset[RDFTriple] = {
val terminological = TripleUtils.isTerminological(tp)
var filteredGraph =
if (terminological) {
schema.getOrElse(tp.getPredicate, graph)
} else {
graph
}
info(s"Lookup data for $tp")
val s = tp.getSubject.toString()
val p = tp.getPredicate.toString()
val o = tp.getObject.toString()
if(tp.getSubject.isConcrete) {
filteredGraph = filteredGraph.filter(t => t.s.equals(s))
}
if(!terminological && tp.getPredicate.isConcrete) {
filteredGraph = filteredGraph.filter(t => t.p.equals(p))
}
if(tp.getObject.isConcrete) {
filteredGraph = filteredGraph.filter(t => t.o.equals(o))
}
filteredGraph
}
private def buildTree(tree: AndNode, visited: Seq[Rule]): AndNode = {
val tp = tree.element
rules.filterNot(visited.contains(_)).foreach(r => {
// check if the head is more general than the triple in question
val head = r.headTriplePatterns()
head.foreach(headTP => {
val subsumes = headTP.subsumes(tp)
if(subsumes) {
// instantiate the rule
val boundRule = instantiateRule(r, tp)
// add new Or-node to tree
val node = new OrNode(boundRule)
// println(node)
tree.children :+= node
boundRule.bodyTriplePatterns().foreach(newTp => {
node.children :+= buildTree(new AndNode(newTp), visited ++ Seq(r))
})
}
})
})
tree
}
  // create a binding for the rule variables
private def instantiateRule(rule: Rule, tp: TriplePattern): Rule = {
val headTP = rule.headTriplePatterns().head // TODO handle rules with multiple head TPs
val binding = new BindingVector(5)
// the subject
if(tp.getSubject.isConcrete && headTP.getSubject.isVariable) {
binding.bind(headTP.getSubject, tp.getSubject)
}
// the predicate
if(tp.getPredicate.isConcrete && headTP.getPredicate.isVariable) {
binding.bind(headTP.getPredicate, tp.getPredicate)
}
// the object
if(tp.getObject.isConcrete && headTP.getObject.isVariable) {
binding.bind(headTP.getObject, tp.getObject)
}
rule.instantiate(binding)
}
import session.implicits._
private def applyRule(rule: Rule, dataset: Dataset[RDFTriple]): Dataset[RDFTriple] = {
// convert to SQL
val sqlGenerator = new SimpleSQLGenerator()
var sql = sqlGenerator.generateSQLQuery(rule)
// val sql =
// """
// |SELECT rel0.s, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type' AS p, 'http://swat.cse.lehigh.edu/onto/univ-bench.owl#Person' AS o
// | FROM TRIPLES rel1 INNER JOIN TRIPLES rel0 ON rel1.s=rel0.p
// | WHERE rel1.o='http://swat.cse.lehigh.edu/onto/univ-bench.owl#Person' AND rel1.p='http://www.w3.org/2000/01/rdf-schema#domain'
// """.stripMargin
// generate logical execution plan
// val planGenerator = new SimplePlanGenerator(TriplesSchema.get())
// val plan = planGenerator.generateLogicalPlan(rule)
val tableName = s"TRIPLES_${rule.getName}"
sql = sql.replace("TRIPLES", tableName)
log.info(s"SQL NEW: $sql")
dataset.createOrReplaceTempView(tableName)
dataset.sparkSession.sql(sql).as[RDFTriple]
}
val properties = Set(
(RDFS.subClassOf, true, "SCO"),
(RDFS.subPropertyOf, true, "SPO"),
(RDFS.domain, false, "DOM"),
(RDFS.range, false, "RAN"))
val DUMMY_VAR = NodeFactory.createVariable("VAR")
/**
* Computes the triples for each schema property p, e.g. `rdfs:subClassOf` and returns it as mapping from p
* to the [[Dataset]] containing the triples.
*
* @param graph the RDF graph
* @return a mapping from the corresponding schema property to the Dataframe of s-o pairs
*/
def extractWithIndex(graph: Dataset[RDFTriple]): Map[Node, Dataset[RDFTriple]] = {
log.info("Started schema extraction...")
val bcProperties = session.sparkContext.broadcast(Set(
RDFS.subClassOf,
RDFS.subPropertyOf,
RDFS.domain,
RDFS.range).map(_.toString))
val schemaTriples = graph.filter(t => bcProperties.value.contains(t.p)).cache()
// for each schema property p
val index =
properties.map { entry =>
val p = entry._1
val tc = entry._2
val alias = entry._3
// get triples (s, p, o)
var triples = lookupSimple(Triple.create(DUMMY_VAR, p, DUMMY_VAR), schemaTriples)
// compute TC if necessary
if (tc) triples = computeTC(triples)
// broadcast the triples
triples = broadcast(triples).alias(alias)
// register as a table
triples.createOrReplaceTempView(FmtUtils.stringForNode(p).replace(":", "_"))
// add to index
(p.asNode() -> triples)
}
log.info("Finished schema extraction.")
index.toMap
}
def query(tp: Triple): Dataset[RDFTriple] = {
import org.apache.spark.sql.functions._
val domain = schema.getOrElse(RDFS.domain, broadcast(graph.filter(t => t.p == RDFS.domain.toString)).alias("DOMAIN"))
val range = schema.getOrElse(RDFS.range, broadcast(graph.filter(t => t.p == RDFS.range.toString)).alias("RANGE"))
val sco = schema.getOrElse(RDFS.subClassOf, broadcast(computeTC(graph.filter(t => t.p == RDFS.subClassOf.toString))).alias("SCO"))
val spo = schema.getOrElse(RDFS.subPropertyOf, broadcast(computeTC(graph.filter(t => t.p == RDFS.subPropertyOf.toString))).alias("SPO"))
// asserted triples
var ds = lookup(tp)
// inferred triples
if(tp.getPredicate.isConcrete) {
if (tp.getPredicate.matches(RDF.`type`.asNode())) { // rdf:type data
var instanceTriples = graph
// if s is concrete, we filter first
if(tp.getSubject.isConcrete) { // find triples where s occurs as subject or object
instanceTriples = instanceTriples.filter(t => t.s == tp.getSubject.toString() || t.o == tp.getSubject.toString())
}
// get all non rdf:type triples
instanceTriples = instanceTriples.filter(_.p != RDF.`type`.toString)
// enrich the instance data with super properties, i.e. rdfs5
if(tp.getSubject.isConcrete) { // find triples where s occurs as subject or object
instanceTriples = instanceTriples.filter(t => t.s == tp.getSubject.toString() || t.o == tp.getSubject.toString())
}
val spoBC = session.sparkContext.broadcast(
CollectionUtils.toMultiMap(spo.select("s", "o").collect().map(r => (r.getString(0), r.getString(1))))
)
val rdfs7 = instanceTriples.flatMap(t => spoBC.value.getOrElse(t.p, Set[String]()).map(supProp => RDFTriple(t.s, supProp, t.o)))
// val rdfs7 = spo
// .join(instanceTriples.alias("DATA"), $"SPO.s" === $"DATA.p", "inner")
// .select($"DATA.s".alias("s"), $"SPO.o".alias("p"), $"DATA.s".alias("o"))
// .as[RDFTriple]
// instanceTriples = instanceTriples.union(rdfs7).cache()
// rdfs2 (domain)
var dom = if (tp.getObject.isConcrete) domain.filter(_.o == tp.getObject.toString()) else domain
dom = dom.alias("DOM")
var data = if (tp.getSubject.isConcrete) {
// // asserted triples :s ?p ?o
// val asserted = instanceTriples.filter(t => t.s == tp.getSubject.toString()).cache()
// // join with super properties
// val inferred = spo
// .join(asserted.alias("DATA"), $"SPO.s" === $"DATA.p", "inner")
// .select($"DATA.s".alias("s"), $"SPO.o".alias("p"), $"DATA.s".alias("o"))
// .as[RDFTriple]
// asserted.union(inferred)
instanceTriples
} else {
instanceTriples
}
val rdftype = RDF.`type`.toString
// val rdfs2 = dom
// .join(data.alias("DATA"), $"DOM.s" === $"DATA.p", "inner")
// .select($"DATA.s", lit(RDF.`type`.toString).alias("p"), dom("o").alias("o"))
// .as[RDFTriple]
val domBC = session.sparkContext.broadcast(
CollectionUtils.toMultiMap(dom.select("s", "o").collect().map(r => (r.getString(0), r.getString(1))))
)
val rdfs2 = data.flatMap(t => domBC.value.getOrElse(t.p, Set[String]()).map(o => RDFTriple(t.s, rdftype, o)))
// rdfs3 (range)
var ran = if (tp.getObject.isConcrete) range.filter(_.o == tp.getObject.toString()) else range
ran = ran.alias("RAN")
data = if (tp.getSubject.isConcrete) {
// // asserted triples ?o ?p :s
// val asserted = instanceTriples.filter(t => t.o == tp.getSubject.toString()).cache()
// // join with super properties
// val inferred = spo
// .join(asserted.alias("DATA"), $"SPO.s" === $"DATA.p", "inner")
// .select($"DATA.s".alias("s"), $"SPO.o".alias("p"), $"DATA.o".alias("o"))
// .as[RDFTriple]
// asserted.union(inferred)
instanceTriples
} else {
instanceTriples
}
// val rdfs3 = ran
// .join(data.alias("DATA"), $"RAN.s" === $"DATA.p", "inner")
// .select($"DATA.o".alias("s"), lit(RDF.`type`.toString).alias("p"), ran("o").alias("o"))
// .as[RDFTriple]
val ranBC = session.sparkContext.broadcast(CollectionUtils.toMultiMap(ran.select("s", "o").collect().map(r => (r.getString(0), r.getString(1)))))
val rdfs3 = data.flatMap(t => ranBC.value.getOrElse(t.p, Set[String]()).map(o => RDFTriple(t.o, rdftype, o)))
// all rdf:type triples
val types = rdfs2
.union(rdfs3)
.union(ds)
.alias("TYPES")
// rdfs9 (subClassOf)
val scoTmp =
if (tp.getObject.isURI) {
sco.filter(_.o == tp.getObject.toString())
} else {
sco
}
val rdfs9 = scoTmp.alias("SCO")
.join(types, $"SCO.s" === $"TYPES.o", "inner")
.select(types("s").alias("s"), lit(RDF.`type`.toString).alias("p"), sco("o").alias("o"))
.as[RDFTriple]
// log.info(s"|rdf:type|=${ds.count()}")
// log.info(s"|rdfs2|=${rdfs2.count()}")
// log.info(s"|rdfs3|=${rdfs3.count()}")
// log.info(s"|rdf:type/rdfs2/rdfs3/|=${types.count()}")
// log.info(s"|rdfs9|=${rdfs9.count()}")
// add all rdf:type triples to result
ds = ds
.union(rdfs9)
.union(types)
// .repartition(200)
} else if (tp.predicateMatches(RDFS.subClassOf.asNode())) {
} else if (tp.predicateMatches(RDFS.subPropertyOf.asNode())) {
} else { // instance data (s,p,o) with p!=rdf:type => we only have to cover subPropertyOf rule
// filter instance data if subject or object was given
val instanceData =
if (tp.getSubject.isConcrete) {
graph.filter(_.s == tp.getSubject.toString())
} else if (tp.getObject.isConcrete) {
graph.filter(_.o == tp.getObject.toString())
} else {
graph
}
// get all subproperties of p
val subProperties = spo.filter(_.o == tp.getPredicate.toString()).alias("SPO")
val rdfs7 = subProperties
.join(instanceData.alias("DATA"), $"SPO.s" === $"DATA.p", "inner")
.select($"DATA.s".alias("s"), $"SPO.o".alias("p"), $"DATA.o".alias("o"))
.as[RDFTriple]
ds = ds.union(rdfs7)
}
} else {
val instanceData = ds
if(tp.getSubject.isConcrete) {
val tmp = spo
.join(instanceData.alias("DATA"), $"SPO.s" === $"DATA.p", "inner")
.select($"DATA.s".alias("s"), $"SPO.o".alias("p"), $"DATA.o".alias("o"))
.as[RDFTriple]
ds = ds.union(tmp)
}
}
// ds.explain()
ds.distinct()
}
/**
* Computes the transitive closure for a Dataset of triples. The assumption is that this Dataset is already
* filter by a single predicate.
*
* @param ds the Dataset of triples
* @return a Dataset containing the transitive closure of the triples
*/
private def computeTC(ds: Dataset[RDFTriple]): Dataset[RDFTriple] = {
var tc = ds
tc.cache()
// the join is iterated until a fixed point is reached
var i = 1
var oldCount = 0L
var nextCount = tc.count()
do {
log.info(s"iteration $i...")
oldCount = nextCount
val joined = tc.alias("A")
.join(tc.alias("B"), $"A.o" === $"B.s")
.select($"A.s", $"A.p", $"B.o")
.as[RDFTriple]
tc = tc
.union(joined)
.distinct()
.cache()
nextCount = tc.count()
i += 1
} while (nextCount != oldCount)
tc.unpersist()
log.info("TC has " + nextCount + " edges.")
tc
}
}
object BackwardChainingReasonerDataframe extends Logging{
val DEFAULT_PARALLELISM = 200
val DEFAULT_NUM_THREADS = 4
def loadRDD(session: SparkSession, path: String): RDD[RDFTriple] = {
RDFGraphLoader
.loadFromDisk(session, path, 20)
.triples.map(t => RDFTriple(t.getSubject.toString(), t.getPredicate.toString(), t.getObject.toString()))
}
def loadDataset(session: SparkSession, path: String): Dataset[RDFTriple] = {
import session.implicits._
session.createDataset(loadRDD(session, path))
}
def loadDatasetFromParquet(session: SparkSession, path: String): Dataset[RDFTriple] = {
import session.implicits._
session.read.parquet(path).as[RDFTriple]
}
def loadDataFrame(session: SparkSession, path: String): DataFrame = {
loadDataset(session, path).toDF()
}
def loadDataFrameFromParquet(session: SparkSession, path: String): DataFrame = {
loadDatasetFromParquet(session, path).toDF()
}
def main(args: Array[String]): Unit = {
    if (args.length == 0) sys.error("USAGE: BackwardChainingReasonerDataframe <INPUT_PATH> <PARQUET>? <NUM_THREADS>? <PARALLELISM>?")
val inputPath = args(0)
val parquet = if (args.length > 1) args(1).toBoolean else false
val numThreads = if (args.length > 2) args(2).toInt else DEFAULT_NUM_THREADS
val parallelism = if (args.length > 3) args(3).toInt else DEFAULT_PARALLELISM
// the SPARK config
val session = SparkSession.builder
.appName(s"Spark Backward Chaining")
.master(s"local[$numThreads]")
.config("spark.eventLog.enabled", "true")
.config("spark.hadoop.validateOutputSpecs", "false") // override output files
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config("spark.default.parallelism", parallelism)
.config("spark.ui.showConsoleProgress", "false")
.config("spark.sql.shuffle.partitions", parallelism)
.config("spark.sql.autoBroadcastJoinThreshold", "10485760")
.config("parquet.enable.summary-metadata", "false")
// .config("spark.sql.cbo.enabled", "true")path
// .config("spark.local.dir", "/home/user/work/datasets/spark/tmp")
.getOrCreate()
var graph = if (parquet) loadDatasetFromParquet(session, inputPath) else loadDataset(session, inputPath)
graph = graph.cache()
graph.createOrReplaceTempView("TRIPLES")
// compute size here to have it cached
time {
log.info(s"|G|=${graph.count()}")
}
val rules = RuleSets.RDFS_SIMPLE
.filter(r => Seq(
"rdfs2"
, "rdfs3"
, "rdfs9"
, "rdfs7"
).contains(r.getName))
val reasoner = new BackwardChainingReasonerDataframe(session, rules, graph)
// VAR rdf:type URI
var tp = Triple.create(
NodeFactory.createVariable("s"),
RDF.`type`.asNode(),
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#Person"))
compare(tp, reasoner)
// :s rdf:type VAR
tp = Triple.create(
NodeFactory.createURI("http://www.Department0.University0.edu/FullProfessor0"),
RDF.`type`.asNode(),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// :s rdfs:subClassOf VAR
tp = Triple.create(
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#ClericalStaff"),
RDFS.subClassOf.asNode(),
NodeFactory.createVariable("o"))
compare(tp, reasoner, true)
// :s rdfs:subPropertyOf VAR
tp = Triple.create(
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#headOf"),
RDFS.subPropertyOf.asNode(),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// VAR :p VAR
tp = Triple.create(
NodeFactory.createVariable("s"),
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#degreeFrom"),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// :s :p VAR
tp = Triple.create(
NodeFactory.createURI("http://www.Department4.University3.edu/GraduateStudent40"),
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#degreeFrom"),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// VAR :p :o
tp = Triple.create(
NodeFactory.createVariable("s"),
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#degreeFrom"),
NodeFactory.createURI("http://www.University801.edu"))
compare(tp, reasoner)
// :s VAR :o
tp = Triple.create(
NodeFactory.createURI("http://www.Department4.University3.edu/GraduateStudent40"),
NodeFactory.createVariable("p"),
NodeFactory.createURI("http://www.University801.edu"))
compare(tp, reasoner)
// :s VAR VAR where :s is a resource
tp = Triple.create(
NodeFactory.createURI("http://www.Department4.University3.edu/GraduateStudent40"),
NodeFactory.createVariable("p"),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// :s VAR VAR where :s is a class
tp = Triple.create(
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#Book"),
NodeFactory.createVariable("p"),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
// :s VAR VAR where :s is a property
tp = Triple.create(
NodeFactory.createURI("http://swat.cse.lehigh.edu/onto/univ-bench.owl#undergraduateDegreeFrom"),
NodeFactory.createVariable("p"),
NodeFactory.createVariable("o"))
compare(tp, reasoner)
session.stop()
}
def compare(tp: Triple, reasoner: BackwardChainingReasonerDataframe, show: Boolean = false): Unit = {
time {
val triples = reasoner.query(tp)
println(triples.count())
if (show) triples.show(false)
}
// time {
// log.info(reasoner.isEntailed(tp))
// }
}
import net.sansa_stack.inference.spark.utils.PrettyDuration._
def time[R](block: => R): R = {
val t0 = System.nanoTime()
val result = block // call-by-name
val t1 = System.nanoTime()
log.info("Elapsed time: " + FiniteDuration(t1 - t0, "ns").pretty)
result
}
}
object TripleSchema extends StructType("s p o".split(" ").map(fieldName => StructField(fieldName, StringType, nullable = false)))
| SANSA-Stack/SANSA-RDF | sansa-inference/sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/backwardchaining/BackwardChainingReasonerDataframe.scala | Scala | apache-2.0 | 22,825 |
package com.estus.optimization
import scala.math._
case class Trace () {
private var trace = List.empty[Double]
var converged = false
var convergeStep = 0
def size: Int = trace.size
def add (node: Double): Unit = trace = (node :: trace).sorted take 2
def contains (value: Double): Boolean = trace.contains(value)
def min: Double = trace.min
def converged(tolRel: Double): Option[Boolean] = {
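    // Relative-tolerance test on the two best values tracked so far:
    // |best - secondBest| < tolRel * (|secondBest| + tolRel), similar to the reltol
    // stopping rule used by common optimizers; returns None until two values exist.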
if (trace.size == 2)
Some((trace.max - trace.min) < (tolRel * (abs(trace.max) + tolRel)))
else
None
}
}
| EstusDev/Estus | estus-optimization/src/main/scala/Trace.scala | Scala | apache-2.0 | 547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.schema
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.table.api.TableException
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.logical.{LocalZonedTimestampType, LogicalType, RowType, TimestampKind, TimestampType}
import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
import org.apache.calcite.plan.RelOptSchema
import org.apache.calcite.rel.`type`.RelDataType
import java.util.{List => JList}
/**
* The class that wraps [[DataStream]] as a Calcite Table.
*/
class DataStreamTable[T](
relOptSchema: RelOptSchema,
names: JList[String],
rowType: RelDataType,
val dataStream: DataStream[T],
val fieldIndexes: Array[Int],
val fieldNames: Array[String],
statistic: FlinkStatistic = FlinkStatistic.UNKNOWN,
fieldNullables: Option[Array[Boolean]] = None)
extends FlinkPreparingTableBase(relOptSchema, rowType, names, statistic) {
if (fieldIndexes.length != fieldNames.length) {
throw new TableException(
s"Number of field names and field indexes must be equal.\n" +
s"Number of names is ${fieldNames.length}, number of indexes is ${fieldIndexes.length}.\n" +
s"List of column names: ${fieldNames.mkString("[", ", ", "]")}.\n" +
s"List of column indexes: ${fieldIndexes.mkString("[", ", ", "]")}.")
}
// check uniqueness of field names
if (fieldNames.length != fieldNames.toSet.size) {
val duplicateFields = fieldNames
// count occurrences of field names
.groupBy(identity).mapValues(_.length)
// filter for occurrences > 1 and map to field name
.filter(g => g._2 > 1).keys
throw new TableException(
s"Field names must be unique.\n" +
s"List of duplicate fields: ${duplicateFields.mkString("[", ", ", "]")}.\n" +
s"List of all fields: ${fieldNames.mkString("[", ", ", "]")}.")
}
val dataType: DataType = fromLegacyInfoToDataType(dataStream.getType)
val fieldTypes: Array[LogicalType] = DataStreamTable.getFieldLogicalTypes(dataType,
fieldIndexes, fieldNames)
}
object DataStreamTable {
def getFieldLogicalTypes(rowType: DataType,
fieldIndexes: Array[Int],
fieldNames: Array[String]): Array[LogicalType] = {
LogicalTypeDataTypeConverter.fromDataTypeToLogicalType(rowType) match {
case rt: RowType =>
// it is ok to leave out fields
if (fieldIndexes.count(_ >= 0) > rt.getFieldCount) {
throw new TableException(
s"Arity of type (" + rt.getFieldNames.toArray.deep + ") " +
"must not be greater than number of field names " + fieldNames.deep + ".")
}
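        // Flink's time-indicator marker indexes are mapped to rowtime/proctime timestamp types;
        // any other index refers to a physical field of the row type.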
fieldIndexes.map {
case TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER =>
new TimestampType(true, TimestampKind.ROWTIME, 3)
case TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER =>
new LocalZonedTimestampType(true, TimestampKind.PROCTIME, 3)
case TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER =>
new TimestampType(3)
case TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER =>
new LocalZonedTimestampType(3)
case i => rt.getTypeAt(i)
}
case t: LogicalType =>
var cnt = 0
val types = fieldIndexes.map {
case TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER =>
new TimestampType(true, TimestampKind.ROWTIME, 3)
case TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER =>
new LocalZonedTimestampType(true, TimestampKind.PROCTIME, 3)
case TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER =>
new TimestampType(3)
case TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER =>
new LocalZonedTimestampType(3)
case _ =>
cnt += 1
t
}
// ensure that the atomic type is matched at most once.
if (cnt > 1) {
throw new TableException(
"Non-composite input type may have only a single field and its index must be 0.")
} else {
types
}
}
}
def getRowType(typeFactory: FlinkTypeFactory,
dataStream: DataStream[_],
fieldNames: Array[String],
fieldIndexes: Array[Int],
fieldNullables: Option[Array[Boolean]]): RelDataType = {
val dataType = fromLegacyInfoToDataType(dataStream.getType)
val fieldTypes = getFieldLogicalTypes(dataType, fieldIndexes, fieldNames)
fieldNullables match {
case Some(nulls) => typeFactory.asInstanceOf[FlinkTypeFactory]
.buildRelNodeRowType(fieldNames, fieldTypes.zip(nulls).map {
case (t, nullable) => t.copy(nullable)
})
case _ => typeFactory.asInstanceOf[FlinkTypeFactory]
.buildRelNodeRowType(fieldNames, fieldTypes)
}
}
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/schema/DataStreamTable.scala | Scala | apache-2.0 | 5,906 |
package es.weso.shaclex.repl
/** A parsing result from string input */
sealed trait ParseResult
/** Parsed result is simply a newline */
case object Newline extends ParseResult
/** `ctrl-c` obtained from input string */
case object SigKill extends ParseResult
/** A command is on the format:
*
* ```none
* :commandName <optional arguments...>
* ```
* The `Command` trait denotes these commands
*/
sealed trait Command extends ParseResult
/** An unknown command that will not be handled by the REPL */
case class UnknownCommand(cmd: String) extends Command
/** An ambiguous prefix that matches multiple commands */
case class AmbiguousCommand(cmd: String, matchingCommands: List[String]) extends Command
/** `:load <path>` interprets a scala file as if entered line-by-line into
* the REPL
*/
case class Load(path: String) extends Command
object Load {
val command: String = ":load"
}
/** `:quit` exits the repl */
case object Quit extends Command {
val command: String = ":quit"
}
/** `:help` shows the different commands implemented by the Dotty repl */
case object Help extends Command {
val command: String = ":help"
val text: String =
"""The REPL has several commands available:
|
|:help print this summary
|:load <path> interpret lines in a file
|:quit exit the interpreter
""".stripMargin
}
object ParseResult {
  private val CommandExtract = """(:[\S]+)\s*(.*)""".r
private val commands: List[(String, String => ParseResult)] = List(
Quit.command -> (_ => Quit),
Help.command -> (_ => Help),
Load.command -> (arg => Load(arg)),
)
def apply(source: SourceFile)(implicit state: State): ParseResult = {
val sourceCode = source.content().mkString
sourceCode match {
case "" => Newline
case CommandExtract(cmd, arg) => {
val matchingCommands = commands.filter{ case (command, _) => command.startsWith(cmd) }
matchingCommands match {
case Nil => UnknownCommand(cmd)
case (_, f) :: Nil => f(arg)
case multiple => AmbiguousCommand(cmd, multiple.map(_._1))
}
}
case _ => UnknownCommand(sourceCode)
}
}
def apply(sourceCode: String)(implicit state: State): ParseResult =
apply(SourceFile.virtual("REPL", sourceCode))
} | labra/shaclex | src/main/scala/es/weso/shaclex/repl/ParseResult.scala | Scala | mit | 2,349 |
trait Service extends HttpService with ServiceJsonProtocol {
...
def route(model: ActorRef)(implicit askTimeout: Timeout) =
authorizeToken(verifyNotExpired && injectUser) {
user =>
get {
path("sample" / IntNumber) { id =>
onSuccess(model ? id) {
case item: Sample =>
                if (item.userId == user.uid)
                  complete(OK, item)
                else
                  complete(Forbidden, "Access denied")
case ItemNotFound =>
complete(NotFound, "Not Found")
}
}
} ~
post {
path("newSample") {
entity(as[ImageRequest]) {
image =>
onSuccess(model ? (image, user.uid)) {
case id: Int =>
complete(OK, id.toString)
}
...
} | VerkhovtsovPavel/BSUIR_Labs | Diploma/diploma-latex/src/routing.scala | Scala | mit | 900 |
import java.io.SequenceInputStream
import java.io.{File, InputStream}
import java.text.SimpleDateFormat
import java.util.Date
import scala.annotation.tailrec
// This script will strip the log files emitted by sbt into something that
// will display in the console in IDEA
object PRINT {def |(s: Any) = Console.println(s)}
object INFO {
def |(s: Any) = {
Console.print("<INF> ")
Console.println(s)
}
}
object DEBUG {
def |(s: Any) = if (IdeaStrip.debug) {
Console.print("<DBG> ")
Console.println(s)
}
}
object IdeaStrip {
val debug = false
val projects = List("looty", "cgta-scala-js").map(Project.apply _)
val MaxLen = projects.flatMap(_.allPaths.map(_.name.length())).max
def start() {
INFO | "Starting Idea Strip -- Pure Scala Script Version 0.1"
INFO | "Projects: " + projects.map(_.name).mkString("[", ",", "]")
INFO | "CWD " + new File(".").getAbsolutePath
// PRINT | "%-20s >>> ".format("HELLO THERE") + "You are welcome, friend."
INFO | "Starting all threads"
projects.foreach(_.start)
while (true) {
Thread.sleep(60000);
}
}
}
object Line {
val info = "info"
val debug = "debug"
val warn = "warn"
val error = "error"
def parse(raw: String): Option[Line] = {
val REGEX = "\\\\[([^ ]*)\\\\] (.*)".r
raw match {
case REGEX(lvl, msg) => Some(Line(lvl, msg))
case x => None // Some(Line("???", x))
}
}
}
case class Line(lvl: String, msg: String) {
def withMsg(message: String): Line = {
val level = lvl match {
case Line.debug => " . "
case Line.info => ".i."
case Line.warn => "=w="
case Line.error => "#E#"
case x => x
}
Line(level, message)
}
def transform = withMsg(this.msg)
}
object LineParsers {
def filepath(line: Line) = {
if (line.lvl == Line.warn || line.lvl == Line.error) {
val Regex = "[^ ]+/src/(main|test)/scala/([^ ]*)/([^/ ]*)\\\\.scala:([0-9]*): (.*)".r
line.msg match {
case Regex(_, pack, cname, lnum, msg) => {
val pkg = pack.replaceAll("/", ".")
val m = "<<< at " + pkg + "." + cname + ".some_method(" + cname + ".scala:" + lnum + ") >>> "
List(line.withMsg(m), line.withMsg(msg))
}
case _ => Nil
}
} else {
Nil
}
}
def initialChanges(line: Line) = {
line.msg.contains("Initial source changes")
}
def fallback(line: Line) : Option[Line] = {
if (line.lvl != Line.debug) {
Some(line.transform)
} else {
None
}
}
}
case class IdeaParse(p: Path) {
var justPrintedInitialChanges = false
def log(s: Line) = {
val tstamp = new SimpleDateFormat("HH:mm:ss").format(new Date())
val res = new StringBuilder
res.append(tstamp).append(" ")
res.append("(").append(("%-" + IdeaStrip.MaxLen + "s").format(p.name)).append(") ")
res.append(s.lvl).append(" ")
// res.append("[").append(s.msg).append("]")
res.append(s.msg)
PRINT | res.toString()
}
def stripANSI(s: String) = {
s.replaceAll("\\033\\\\[[0-9]+[m]", "")
}
def apply(s: String) {
Line.parse(stripANSI(s)).foreach { line =>
val res = LineParsers.filepath(line) match {
case Nil => {
if (LineParsers.initialChanges(line) && !justPrintedInitialChanges) {
justPrintedInitialChanges = true
PRINT | (" " * 50)
PRINT | ("#" * 50)
PRINT | (" " * 50)
} else {
justPrintedInitialChanges = false
LineParsers.fallback(line).foreach(log)
}
}
case xs => {
justPrintedInitialChanges = false
xs.foreach(log)
}
}
}
}
}
case class Project(name: String) {
def streamsPath = Path(name, "./" + name + "/target/streams/")
def globalPath = Path("global", "$global/$global/$global/streams/out")
def testPath = Path("test", "test/compile/$global/streams/out")
def compilePath = Path("compile", "compile/compile/$global/streams/out")
def subpaths = List(globalPath, testPath, compilePath)
def allPaths = subpaths.map(streamsPath / _)
def threads = allPaths.map {p =>
val parser = IdeaParse(p)
TailThread(p)(parser.apply)
}
def start = threads.foreach(_.start())
}
case class Path(name: String, path: String) {
def /(p: Path) = Path(name + "." + p.name, path + (if (path endsWith "/") "" else "/") + p.path)
}
/**
* This class will actually follow the file
* @param p
* @param f
*/
case class TailThread(val p: Path)(f: String => Unit) {
val thread = new Thread(new Runnable {def run() {main()}})
def start() {
DEBUG | "Starting watcher " + p.name
thread.start()
}
var buf = new StringBuilder
def main() {
try {
val fis = Tail.follow(new File(p.path))
def loop {
val c = fis.read()
      if (c == '\n'.toInt) {
f(buf.toString)
buf = new StringBuilder
loop
} else if (c != -1) {
buf.append(c.toChar)
loop
} else {
//-1 is DEAD LAND
PRINT | p + " REACHED EOF ??? " + buf.toString
}
}
loop
} finally {
PRINT | "EXITING Watcher of " + p
}
}
}
IdeaStrip.start()
/**
* https://github.com/alaz/tailf
*
* Copyright (C) 2009 alaz <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
object Tail {
/**
* Create InputStream reading from a log file
*
* Calls follow with reasonable defaults:
   * unlimited open retries (openTries defaults to -1)
   * 0.5 seconds waiting for a new file after the previous one has disappeared
   * 0.1 seconds waiting between reads
*/
def follow(file: File, openTries: Int = -1, openSleepMs: Int = 500, readSleepMs: Int = 100): InputStream = {
def sleep(msec: Long) = () => Thread.sleep(msec)
follow(file, openTries, sleep(openSleepMs), sleep(readSleepMs))
}
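  // Illustrative usage (the log file name below is hypothetical). The returned stream never
  // signals EOF while the file keeps growing or is recreated, so it can be read line by line:
  //   val in = Tail.follow(new File("build.log"))
  //   scala.io.Source.fromInputStream(in).getLines().foreach(println)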
/**
* Create InputStream reading from a log file
*
   * Creates an InputStream reading from a growing file that may also be rotated
* @param file File handle to the log file
* @param openTries how many times to try to re-open the file
* @param openSleep a function to sleep between re-open retries
   * @param rereadSleep a function to be called when the stream has reached the end of
   *        the file and needs to wait for more input
* @return InputStream object
*/
def follow(file: File, openTries: Int, openSleep: () => Unit, rereadSleep: () => Unit): InputStream = {
val e = new java.util.Enumeration[InputStream]() {
def nextElement = new FollowingInputStream(file, rereadSleep)
def hasMoreElements = testExists(file, openTries, openSleep)
}
new SequenceInputStream(e)
}
/**
* Test file existence N times, wait between retries
*
* @param file file handle
* @param tries how many times to try
* @param sleep function to call between tests
* @return true on success
*/
def testExists(file: File, tries: Int, sleep: () => Unit): Boolean = {
def tryExists(n: Int): Boolean =
if (file.exists) true
else if (tries > 0 && n > tries) false
else {
sleep()
tryExists(n + 1)
}
tryExists(1)
}
}
/**
* InputStream that handles growing file case
*
 * The InputStream will not raise EOF when it reaches the end of the file; instead,
 * it will wait and continue reading.
 *
 * It does not handle the case where the file has been rotated. In that case,
 * it behaves just as if it had found EOF.
*/
class FollowingInputStream(val file: File, val waitNewInput: () => Unit) extends InputStream {
import java.io.FileInputStream
private val underlying = new FileInputStream(file)
def read: Int = handle(underlying.read)
override def read(b: Array[Byte]): Int = read(b, 0, b.length)
override def read(b: Array[Byte], off: Int, len: Int): Int = handle(underlying.read(b, off, len))
override def close = underlying.close
protected def rotated_? = {
underlying.getChannel.position > file.length
}
protected def closed_? = !underlying.getChannel.isOpen
@tailrec
private def handle(read: => Int): Int = read match {
case -1 if rotated_? || closed_? => -1
case -1 =>
waitNewInput()
handle(read)
case i => i
}
require(file != null)
assume(file.exists)
} | benjaminjackman/looty | bin/IdeaStrip.scala | Scala | gpl-2.0 | 8,805 |
/* _____ _
* | ___| __ __ _ _ __ ___ (_) __ _ _ __
* | |_ | '__/ _` | '_ ` _ \\| |/ _` | '_ \\
* | _|| | | (_| | | | | | | | (_| | | | |
* |_| |_| \\__,_|_| |_| |_|_|\\__,_|_| |_|
*
* Copyright 2014 Pellucid Analytics
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package framian
package json
import scala.reflect.ClassTag
import spire.algebra.Order
trait JsonModule {
type JsonValue
def JsonValue: JsonValueCompanion
type JsonError
def JsonError: JsonErrorCompanion
type JsonPath
def JsonPath: JsonPathCompanion
implicit def JsonPathOrder: Order[JsonPath]
implicit def JsonPathClassTag: ClassTag[JsonPath]
def visitJson[A](visitor: JsonVisitor[A])(json: JsonValue): A
def parseJson(jsonStr: String): Either[JsonError, JsonValue]
def parseJsonSeq(jsonStr: String): Either[JsonError, Seq[JsonValue]] =
parseJson(jsonStr).right flatMap visitJson(SeqExtractor)
trait JsonValueCompanion {
def jsonObject(values: Seq[(String, JsonValue)]): JsonValue
def jsonArray(values: Seq[JsonValue]): JsonValue
def jsonString(value: String): JsonValue
def jsonNumber(value: BigDecimal): JsonValue
def jsonBoolean(value: Boolean): JsonValue
def jsonNull: JsonValue
}
trait JsonVisitor[A] {
def jsonObject(values: Iterable[(String, JsonValue)]): A
def jsonArray(values: Seq[JsonValue]): A
def jsonString(value: String): A
def jsonNumber(value: BigDecimal): A
def jsonBoolean(value: Boolean): A
def jsonNull(): A
}
trait JsonPathCompanion {
def root: JsonPath
def cons(fieldName: String, path: JsonPath): JsonPath
def cons(index: Int, path: JsonPath): JsonPath
def uncons[A](path: JsonPath)(z: => A, f: (String, JsonPath) => A, g: (Int, JsonPath) => A): A
def isEmpty(path: JsonPath): Boolean = uncons(path)(true, (_, _) => false, (_, _) => false)
}
implicit final class JsonPathOps(path: JsonPath) {
def :: (fieldName: String): JsonPath = JsonPath.cons(fieldName, path)
def :: (index: Int): JsonPath = JsonPath.cons(index, path)
def uncons[A](z: => A, f: (String, JsonPath) => A, g: (Int, JsonPath) => A): A = JsonPath.uncons(path)(z, f, g)
def isEmpty: Boolean = JsonPath.isEmpty(path)
}
trait JsonErrorCompanion {
def invalidJson(message: String): JsonError
def ioError(e: Exception): JsonError
}
/**
* Inflates a JSON object from a set of paths and values. These paths/values
* must be consistent, otherwise `None` will be returned.
*/
def inflate(kvs: List[(JsonPath, JsonValue)]): Option[JsonValue] = {
def sequence[A](xs: List[Option[A]], acc: Option[List[A]] = None): Option[List[A]] =
xs.foldLeft(Some(Nil): Option[List[A]]) { (acc, x) =>
acc flatMap { ys => x map (_ :: ys) }
}
def makeArray(fields: List[(Int, JsonValue)]): JsonValue = {
val size = fields.map(_._1).max
val elems = fields.foldLeft(Vector.fill(size)(JsonValue.jsonNull)) { case (xs, (i, x)) =>
xs.updated(i, x)
}
JsonValue.jsonArray(elems)
}
def obj(kvs: List[(JsonPath, JsonValue)]): Option[JsonValue] = {
val fields: Option[List[(String, (JsonPath, JsonValue))]] = sequence(kvs map { case (path, value) =>
path.uncons(None, (key, path0) => Some((key, (path0, value))), (_, _) => None)
})
fields flatMap { fields0 =>
sequence(fields0.groupBy(_._1).toList map { case (key, kvs0) =>
inflate(kvs0 map (_._2)) map (key -> _)
}) map JsonValue.jsonObject
}
}
def arr(kvs: List[(JsonPath, JsonValue)]): Option[JsonValue] = {
val fields: Option[List[(Int, (JsonPath, JsonValue))]] = sequence(kvs map { case (path, value) =>
path.uncons(None, (_, _) => None, (idx, path0) => Some((idx, (path0, value))))
})
fields flatMap { fields0 =>
sequence(fields0.groupBy(_._1).toList map { case (idx, kvs0) =>
inflate(kvs0 map (_._2)) map (idx -> _)
}) map makeArray
}
}
kvs match {
case Nil => None
case (path, value) :: Nil if path.isEmpty => Some(value)
case _ => obj(kvs) orElse arr(kvs)
}
}
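  // Illustrative sketch (hypothetical field names): inflating
  //   List(("name" :: JsonPath.root) -> JsonValue.jsonString("x"),
  //        ("age" :: JsonPath.root) -> JsonValue.jsonNumber(3))
  // yields Some of a JSON object equivalent to {"name": "x", "age": 3}; paths that
  // disagree on object vs. array structure at the same position yield None.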
private object SeqExtractor extends JsonVisitor[Either[JsonError, Seq[JsonValue]]] {
def error(tpe: String): Either[JsonError, Seq[JsonValue]] =
Left(JsonError.invalidJson(s"Expected JSON array, but found ${tpe}."))
def jsonObject(values: Iterable[(String, JsonValue)]): Either[JsonError, Seq[JsonValue]] = error("object")
def jsonArray(values: Seq[JsonValue]): Either[JsonError, Seq[JsonValue]] = Right(values)
def jsonString(value: String): Either[JsonError, Seq[JsonValue]] = error("string")
def jsonNumber(value: BigDecimal): Either[JsonError, Seq[JsonValue]] = error("number")
def jsonBoolean(value: Boolean): Either[JsonError, Seq[JsonValue]] = error("boolean")
def jsonNull(): Either[JsonError, Seq[JsonValue]] = error("null")
}
}
| longcao/framian | framian-json-base/src/main/scala/framian/json/JsonModule.scala | Scala | apache-2.0 | 5,456 |
package services
import java.util.UUID
import bson.handlers._
import com.kylegalloway.evescala.{EveScala, EveScalaBuilder}
import com.kylegalloway.evescala.esi.model.CharacterID
import com.kylegalloway.evescala.xml.model._
import com.mohiva.play.silhouette.api.Logger
import com.mohiva.play.silhouette.api.exceptions.NotAuthenticatedException
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.impl.providers.OAuth2Info
import concurrent.duration._
import org.joda.time.DateTime
import play.api.libs.concurrent.Execution.Implicits._
import play.modules.reactivemongo.ReactiveMongoApi
import reactivemongo.api.Cursor
import reactivemongo.api.collections.bson.BSONCollection
import reactivemongo.bson.{BSONDateTime, BSONDocument, Macros}
import silhouette.evesso.{EveCharacter, EveSSOProvider}
import silhouette.evesso.service.EveCharacterIdentityService
import scala.concurrent.Future
class WalletJournalService(
reactiveMongoApi: ReactiveMongoApi,
walletJournalCollectionName: String,
updatedAtCollectionName: String,
eveCharacterIdentityService: EveCharacterIdentityService,
authInfoRepository: AuthInfoRepository,
eveSSOProvider: EveSSOProvider,
eveScalaBuilder: EveScalaBuilder
) extends Logger {
private implicit val BountyPrizeWalletJournalEntryHandler = Macros.handler[BountyPrizeWalletJournalEntry]
private implicit val RawWalletJournalEntryHandler = Macros.handler[RawWalletJournalEntry]
private implicit val IncursionPayoutWalletJournalEntryHandler = Macros.handler[IncursionPayoutWalletJournalEntry]
private implicit val SalesTaxWalletJournalEntryHandler = Macros.handler[SalesTaxWalletJournalEntry]
private implicit val BrokerFeeWalletJournalEntryHandler = Macros.handler[BrokerFeeWalletJournalEntry]
private implicit val WalletJournalHandler = Macros.handlerOpts[WalletJournalEntry, Macros.Options.Default]
private val UpdateInterval = 15.minutes.toMillis
private def walletJournalCollectionF: Future[BSONCollection] =
reactiveMongoApi.database.map(_.collection(walletJournalCollectionName))
private def updatedAtCollectionF: Future[BSONCollection] =
reactiveMongoApi.database.map(_.collection(updatedAtCollectionName))
private def selectorForCharacter(characterId: CharacterID) = {
BSONDocument("characterId" -> characterId)
}
private def selectorForUser(globalUserId: UUID) = {
BSONDocument("globalUserId" -> globalUserId)
}
private def eveScalaForCharacter(character: EveCharacter, authInfoOpt: Option[OAuth2Info]): Future[EveScala] = {
authInfoOpt.map { authInfo =>
eveSSOProvider.refreshAccessToken(authInfo).map { newAuthInfo =>
eveScalaBuilder.buildForCharacter(characterId = character.characterId, token = newAuthInfo.accessToken)
}
}.getOrElse(
Future.failed(new NotAuthenticatedException(s"Cannot find auth info for character ${character.characterId}"))
)
}
def updateForCharacterAndSetCachedUntil(eveCharacter: EveCharacter): Future[Unit] = {
val currentTimeMillis = DateTime.now().getMillis
for {
authInfoOpt <- authInfoRepository.find[OAuth2Info](eveCharacter.loginInfo)
eveScala <- eveScalaForCharacter(eveCharacter, authInfoOpt)
walletJournal <- eveScala.xml.walletJournal
walletJournalCollection <- walletJournalCollectionF
_ <- walletJournal.foldLeft(Future.successful({})) { (batchResultF, walletJournalEntry) =>
batchResultF.flatMap { _ =>
val selector = selectorForCharacter(eveCharacter.characterId) ++
BSONDocument("refID" -> walletJournalEntry.refID)
val value = selector ++ WalletJournalHandler.write(walletJournalEntry)
walletJournalCollection.update(selector, value, upsert = true).map(_ => {})
}
}
updatedAtCollection <- updatedAtCollectionF
selector = selectorForCharacter(eveCharacter.characterId)
value = selector ++ BSONDocument(
"cachedUntil" -> BSONDateTime(currentTimeMillis + UpdateInterval),
"updatedAt" -> BSONDateTime(currentTimeMillis)
)
_ <- updatedAtCollection.update(selector, value, upsert = true)
} yield {
}
}
def updateForCharacter(eveCharacter: EveCharacter): Future[Unit] = {
logger.info(s"Updating wallet journal for $eveCharacter")
val currentTimeMillis = DateTime.now().getMillis
for {
updatedAtCollection <- updatedAtCollectionF
cachedUntilOpt <- updatedAtCollection.find(selectorForCharacter(eveCharacter.characterId)).one[BSONDocument]
_ <- if (cachedUntilOpt.isEmpty ||
cachedUntilOpt.flatMap(_.getAs[BSONDateTime]("cachedUntil")).get.value < currentTimeMillis) {
logger.debug(s"Cache expired, wallet journal for $eveCharacter")
updateForCharacterAndSetCachedUntil(eveCharacter)
} else {
logger.debug(s"Wallet journal cache still valid for $eveCharacter")
Future.successful[Unit]({})
}
} yield {
}
}
def updateForAll(): Future[Unit] = {
logger.info(s"Updating net worth for all users")
for {
characters <- eveCharacterIdentityService.retrieveAll
_ <- characters.grouped(5).foldLeft(Future.successful[Unit]({})) { (batchResultF, charactersBatch) =>
batchResultF.flatMap { _ =>
Future.sequence {
            charactersBatch.map(updateForCharacter)
}.map(_ => {})
}
}
} yield {
}
}
private def beforeAfterSelector(before: Option[DateTime], after: Option[DateTime]): BSONDocument = {
val beforeSelector =
before.map(dateTime => "date" -> BSONDocument("$lt" -> DateTimeHandler.write(dateTime)))
val afterSelector =
after.map(dateTime => "date" -> BSONDocument("$gt" -> DateTimeHandler.write(dateTime)))
val selectors = Seq(beforeSelector, afterSelector).flatten
BSONDocument(selectors)
}
def walletJournal(
globalUserId: UUID,
before: Option[DateTime],
after: Option[DateTime],
limitOpt: Option[Int]
): Future[Seq[WalletJournalEntry]] = {
for {
characters <- eveCharacterIdentityService.retrieve(globalUserId)
_ <- Future.sequence(characters.map(updateForCharacter))
walletJournalCollection <- walletJournalCollectionF
walletJournal <- Future.sequence(characters.map { eveCharacter =>
val selector = selectorForCharacter(eveCharacter.characterId) ++ beforeAfterSelector(before, after)
val cursor = walletJournalCollection.find(selector).sort(BSONDocument("date" -> -1)).cursor[WalletJournalEntry]()
val maxDocs = limitOpt.getOrElse(-1)
cursor.collect[List](maxDocs = maxDocs, err = Cursor.ContOnError[List[WalletJournalEntry]]())
}).map(_.flatten.toSeq.sortBy(-_.date.getMillis))
} yield {
limitOpt.map(limit => walletJournal.take(limit)).getOrElse(walletJournal)
}
}
}
| pequalsnp/eve-isk-tracker | app/services/WalletJournalService.scala | Scala | mit | 6,854 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.streams.flow
import akka.stream.{ OverflowStrategy, SourceShape }
import akka.stream.scaladsl.{ Concat, Framing, GraphDSL, Merge, RunnableGraph, Sink, Source }
import akka.util.ByteString
import com.github.dnvriend.streams.{ Person, TestSpec }
import com.github.dnvriend.streams.flow.SimpleFlowTest.StarWars
import play.api.libs.json.Json
import scala.collection.immutable.{ Iterable, Seq }
import scala.concurrent.Future
object SimpleFlowTest {
final case class StarWars(first: String, last: String)
}
class SimpleFlowTest extends TestSpec {
it should "mapAsync with odd number of parallelism" in {
Source(1 to 3).mapAsync(5)(i ⇒ Future(i * 2))
.runWith(Sink.seq).futureValue shouldBe Seq(2, 4, 6)
}
it should "zip with an index" in {
Source(Seq("a", "b")).statefulMapConcat { () ⇒
var index = 0L
def next: Long = {
index += 1
index
}
(string) ⇒ Iterable((string, next))
}.take(10).runWith(Sink.seq).futureValue shouldBe Seq(("a", 1), ("b", 2))
Source(List("a", "b", "c"))
.zip(Source.fromIterator(() ⇒ Iterator from 1))
.runWith(Sink.seq).futureValue shouldBe Seq(("a", 1), ("b", 2), ("c", 3))
}
it should "emit only odd numbers" in {
Source.fromIterator(() ⇒ Iterator from 0).statefulMapConcat { () ⇒
var index = 1L
def next: Long = {
index += 1L
if (index % 2 != 0) index else {
next
}
}
(string) ⇒ Iterable((string, next))
}.take(10).runForeach(println)
}
it should "create tuples" in {
Source(List(List("a", "b"), List("c", "d")))
.flatMapConcat { xs ⇒
Source(xs).take(1).zip(Source(xs).drop(1))
}.runWith(Sink.seq).futureValue shouldBe Seq(("a", "b"), ("c", "d"))
}
it should "parse some csv from the classpath" in withByteStringSource("csv/starwars.csv") { src ⇒
    src.via(Framing.delimiter(ByteString("\n"), Integer.MAX_VALUE))
.map(_.utf8String)
.drop(1)
.map(_.split(",").toList)
.flatMapConcat { xs ⇒
Source(xs).take(1).zip(Source(xs).drop(1))
}.map(StarWars.tupled)
.runWith(Sink.seq).futureValue shouldBe Seq(
StarWars("darth", "vader"),
StarWars("leia", "organa"),
StarWars("luke", "skywalker"),
StarWars("han", "solo"),
StarWars("boba", "fett"),
StarWars("obi-wan", "kenobi"),
StarWars("darth", "maul"),
StarWars("darth", "sidious"),
StarWars("padme", "amidala"),
StarWars("lando", "calrissian"),
StarWars("mace", "windu")
)
}
it should "concat" in {
Source(List(1, 2)).concat(Source(List(3, 4)))
.runWith(Sink.seq).futureValue shouldBe Seq(1, 2, 3, 4)
}
it should "merge" in {
Source.fromGraph(GraphDSL.create() { implicit b ⇒
import GraphDSL.Implicits._
val merge = b.add(Concat[Int](2))
Source.single(1) ~> merge
Source.repeat(5) ~> merge
SourceShape(merge.out)
}).take(4).runWith(Sink.seq).futureValue shouldBe Seq(1, 5, 5, 5)
Source.single(1).concat(Source.repeat(5))
.take(4).runWith(Sink.seq).futureValue shouldBe Seq(1, 5, 5, 5)
}
it should "unfold" in {
import scala.concurrent.duration._
Source.tick(0.seconds, 500.millis, 0).flatMapConcat { _ ⇒
Source.unfold(0) { (e) ⇒
val next = e + 1
if (next > 3) None else Some((next, next))
}
}.take(6).runWith(Sink.seq).futureValue shouldBe Seq(1, 2, 3, 1, 2, 3)
}
}
| dnvriend/intro-to-akka-streams | src/test/scala/com/github/dnvriend/streams/flow/SimpleFlowTest.scala | Scala | apache-2.0 | 4,138 |
package vep.app.user
import java.time.LocalDateTime
import org.mindrot.jbcrypt.BCrypt
import scalikejdbc.WrappedResultSet
import spray.json.{JsonFormat, JsonParser, RootJsonFormat}
import vep.framework.utils.JsonProtocol
case class User(
id: String,
email: String,
password: String,
role: UserRole.Value,
authentications: Seq[Authentication],
activationKey: Option[String],
resetPasswordKey: Option[String]
)
object User {
import JsonProtocol._
def apply(rs: WrappedResultSet): User = new User(
id = rs.string("id"),
email = rs.string("email"),
password = rs.string("password"),
role = UserRole.deserialize(rs.string("role")),
authentications = Authentication.authenticationSeqFormat.read(JsonParser(rs.string("authentications"))),
activationKey = rs.stringOpt("activationKey"),
resetPasswordKey = rs.stringOpt("resetPasswordKey")
)
implicit val userFormat: RootJsonFormat[User] = jsonFormat7(User.apply)
}
object UserRole extends Enumeration {
val user, admin = Value
import JsonProtocol._
implicit val userRoleFormat: RootJsonFormat[UserRole.Value] = enumFormat(this, withName)
def fromString(value: String): Option[UserRole.Value] = {
values.find(_.toString == value)
}
private[user] def deserialize(value: String): UserRole.Value = {
fromString(value).get
}
}
case class Authentication(
token: String,
date: LocalDateTime
) {
def crypt(): Authentication = copy(
token = BCrypt.hashpw(token, BCrypt.gensalt())
)
}
object Authentication {
import JsonProtocol._
implicit val authenticationFormat: JsonFormat[Authentication] = jsonFormat2(Authentication.apply)
implicit val authenticationSeqFormat: JsonFormat[Seq[Authentication]] = seqJsonFormat(authenticationFormat)
}
case class Profile(
email: String,
firstName: String,
lastName: String,
address: String,
zipCode: String,
city: String,
phones: Seq[Phone]
)
object Profile {
import JsonProtocol._
def apply(rs: WrappedResultSet): Profile = new Profile(
email = rs.stringOpt("email").getOrElse(""),
firstName = rs.stringOpt("first_name").getOrElse(""),
lastName = rs.stringOpt("last_name").getOrElse(""),
address = rs.stringOpt("address").getOrElse(""),
zipCode = rs.stringOpt("zip_code").getOrElse(""),
city = rs.stringOpt("city").getOrElse(""),
phones = rs.stringOpt("phones").map(phones => Phone.phoneSeqFormat.read(JsonParser(phones))).getOrElse(Seq.empty)
)
implicit val profileFormat: RootJsonFormat[Profile] = jsonFormat7(Profile.apply)
}
case class Phone(
name: String,
number: String
)
object Phone {
import JsonProtocol._
implicit val phoneFormat: JsonFormat[Phone] = jsonFormat2(Phone.apply)
implicit val phoneSeqFormat: JsonFormat[Seq[Phone]] = seqJsonFormat(phoneFormat)
}
case class UserView(
id: String,
email: String,
firstName: String,
lastName: String
)
object UserView {
import JsonProtocol._
def apply(rs: WrappedResultSet): UserView = new UserView(
id = rs.stringOpt("id").getOrElse(""),
email = rs.stringOpt("email").getOrElse(""),
firstName = rs.stringOpt("first_name").getOrElse(""),
lastName = rs.stringOpt("last_name").getOrElse("")
)
implicit val userViewFormat: JsonFormat[UserView] = jsonFormat4(UserView.apply)
} | kneelnrise/vep | src/main/scala/vep/app/user/User.scala | Scala | mit | 3,302 |
/*
* -------------------------------------------------------------------------------------------------
* - Project: Objectify -
* - Copyright: ©2014 Matygo Educational Incorporated operating as Learndot -
* - Author: Arthur Gonigberg ([email protected]) and contributors (see contributors.txt) -
* - License: Licensed under MIT license (see license.txt) -
* -------------------------------------------------------------------------------------------------
*/
package org.objectify.resolvers
import javax.servlet.ServletContext
import org.objectify.adapters.ObjectifyRequestAdapter
/**
* Resolver for the servlet context
*/
class ServletContextResolver extends Resolver[ServletContext, ObjectifyRequestAdapter] {
def apply(req: ObjectifyRequestAdapter): ServletContext = {
req.getRequest.getServletContext
}
}
| learndot/Objectify.scala | src/main/scala/org/objectify/resolvers/ServletContextResolver.scala | Scala | mit | 982 |
/*
* Copyright 2020 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.persistence.migration
import za.co.absa.commons.version.impl.SemVer20Impl.SemanticVersion
import scala.util.matching.Regex
case class MigrationScript(verFrom: SemanticVersion, verTo: SemanticVersion, script: String) {
override def toString: String = MigrationScript.asString(this)
}
object MigrationScript {
private val SemVerRegexp: Regex = ("" +
"(?:0|[1-9]\\\\d*)\\\\." +
"(?:0|[1-9]\\\\d*)\\\\." +
"(?:0|[1-9]\\\\d*)" +
"(?:-(?:0|[1-9]\\\\d*|\\\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\\\.(?:0|[1-9]\\\\d*|\\\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?" +
"(?:\\\\+[0-9a-zA-Z-]+(?:\\\\.[0-9a-zA-Z-]+)*)?").r
private def asString(script: MigrationScript): String =
FileNamePattern
.replaceFirst("\\\\*", script.verFrom.asString)
.replaceFirst("\\\\*", script.verTo.asString)
val FileNamePattern = "*-*.js"
val NameRegexp: Regex = s"($SemVerRegexp)-($SemVerRegexp).js".r
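  // e.g. a script migrating from version 0.4.0 to 0.5.0 is named "0.4.0-0.5.0.js"
  // (the version numbers here are illustrative)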
}
| AbsaOSS/spline | persistence/src/main/scala/za/co/absa/spline/persistence/migration/MigrationScript.scala | Scala | apache-2.0 | 1,506 |
package gitbucket.core.model
trait MilestoneComponent extends TemplateComponent { self: Profile =>
import profile.api._
import self._
lazy val Milestones = TableQuery[Milestones]
class Milestones(tag: Tag) extends Table[Milestone](tag, "MILESTONE") with MilestoneTemplate {
override val milestoneId = column[Int]("MILESTONE_ID", O AutoInc)
val title = column[String]("TITLE")
val description = column[Option[String]]("DESCRIPTION")
val dueDate = column[Option[java.util.Date]]("DUE_DATE")
val closedDate = column[Option[java.util.Date]]("CLOSED_DATE")
def * = (userName, repositoryName, milestoneId, title, description, dueDate, closedDate) <> (Milestone.tupled, Milestone.unapply)
def byPrimaryKey(owner: String, repository: String, milestoneId: Int) = byMilestone(owner, repository, milestoneId)
def byPrimaryKey(userName: Rep[String], repositoryName: Rep[String], milestoneId: Rep[Int]) = byMilestone(userName, repositoryName, milestoneId)
}
}
case class Milestone(
userName: String,
repositoryName: String,
milestoneId: Int = 0,
title: String,
description: Option[String],
dueDate: Option[java.util.Date],
closedDate: Option[java.util.Date]
)
| shiena/gitbucket | src/main/scala/gitbucket/core/model/Milestone.scala | Scala | apache-2.0 | 1,208 |
package gsd.linux
import cnf.{DimacsReader, SATBuilder, CNFParser}
import org.junit.Test
class LinuxSATTest {
def isDimacsSAT(file: String): Boolean = {
println("Reading %s...".format(file))
val (header, problem) =
(DimacsReader.readHeaderFile(file), DimacsReader.readFile(file))
val sat = new SATBuilder(problem.cnf, problem.numVars, header.generated)
sat.isSatisfiable
}
// FIXME add some test cases
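  // A test case could look like the following sketch; the DIMACS file path is
  // hypothetical and would need to point at a real dimacs/header pair.
  // @Test
  // def satisfiableKernelModel(): Unit = {
  //   assert(isDimacsSAT("input/linux-2.6.28.6.dimacs"))
  // }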
}
| scas-mdd/linux-variability-analysis-tools.fm-translation | src/test/scala/gsd/linux/LinuxSATTest.scala | Scala | gpl-3.0 | 439 |
package library.network.rpcprotocol
import java.lang.reflect.{ParameterizedType, Type}
import java.util
class ListOfJson[T](var wrapped: Class[_]) extends ParameterizedType {

  def getActualTypeArguments: Array[Type] = Array[Type](wrapped)

  def getRawType: Type = classOf[util.List[_]]

  def getOwnerType: Type = null
}
| leyyin/university | systems-for-design-and-implementation/labs/lab3/LibraryNetworking/src/library/network/rpcprotocol/ListOfJson.scala | Scala | mit | 412 |
/*
* Copyright (C) 2017 HAT Data Exchange Ltd
* SPDX-License-Identifier: AGPL-3.0
*
* This file is part of the Hub of All Things project (HAT).
*
* HAT is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation, version 3 of
* the License.
*
* HAT is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General
* Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*
* Written by Tyler Weir <[email protected]>
* 1 / 2021
*/
package org.hatdex.hat.NamespaceUtils
import org.hatdex.hat.api.models.{ NamespaceRead, NamespaceWrite, UserRole }
object NamespaceUtils {
def testWriteNamespacePermissions(
roles: Seq[UserRole],
namespace: String): Boolean = {
val matchedRoles = roles.map {
case NamespaceWrite(n) if n == namespace => Some(namespace)
case _ => None
}
matchedRoles.flatten.nonEmpty
}
def testReadNamespacePermissions(
roles: Seq[UserRole],
namespace: String): Boolean = {
val matchedRoles = roles.map {
case NamespaceRead(n) if n == namespace => Some(namespace)
case _ => None
}
matchedRoles.flatten.nonEmpty
}
}
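// Illustrative usage only (not part of the original file); the "profile" namespace is made up:
//   NamespaceUtils.testWriteNamespacePermissions(Seq(NamespaceWrite("profile")), "profile")  // true
//   NamespaceUtils.testReadNamespacePermissions(Seq(NamespaceWrite("profile")), "profile")   // false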
| Hub-of-all-Things/HAT2.0 | hat/app/org/hatdex/hat/utils/NamespaceUtils.scala | Scala | agpl-3.0 | 1,573 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.leveldb.replicated
import org.fusesource.hawtbuf.Buffer._
import java.util.concurrent._
import java.nio.MappedByteBuffer
import sun.nio.ch.DirectBuffer
import java.io.{RandomAccessFile, File}
import java.nio.channels.FileChannel
import java.util.concurrent.atomic.AtomicInteger
import org.fusesource.hawtdispatch._
import org.apache.activemq.leveldb.util.FileSupport._
import org.apache.activemq.leveldb.LevelDBClient
import scala.collection.immutable.TreeMap
object ReplicationSupport {
val WAL_ACTION = ascii("wal")
val LOGIN_ACTION= ascii("LevelDB Store Replication v1:login")
val SYNC_ACTION = ascii("sync")
val GET_ACTION = ascii("get")
val ACK_ACTION = ascii("ack")
val OK_ACTION = ascii("ok")
val DISCONNECT_ACTION = ascii("disconnect")
val ERROR_ACTION = ascii("error")
val LOG_DELETE_ACTION = ascii("rm")
  // Forcefully release a memory-mapped buffer via the JDK-internal cleaner instead of waiting for GC.
  def unmap(buffer:MappedByteBuffer ) {
try {
buffer.asInstanceOf[DirectBuffer].cleaner().clean();
} catch {
case ignore:Throwable =>
}
}
def map(file:File, offset:Long, length:Long, readOnly:Boolean) = {
val raf = new RandomAccessFile(file, if(readOnly) "r" else "rw");
try {
val mode = if (readOnly) FileChannel.MapMode.READ_ONLY else FileChannel.MapMode.READ_WRITE
raf.getChannel().map(mode, offset, length);
} finally {
raf.close();
}
}
  // Take a copy of the current store (log and index files) into a "stash" subdirectory,
  // building it under a temporary name and renaming it so a partial stash is never visible.
  def stash(directory:File) {
directory.mkdirs()
val tmp_stash = directory / "stash.tmp"
val stash = directory / "stash"
stash.recursiveDelete
tmp_stash.recursiveDelete
tmp_stash.mkdirs()
copy_store_dir(directory, tmp_stash)
tmp_stash.renameTo(stash)
}
def copy_store_dir(from:File, to:File) = {
val log_files = LevelDBClient.find_sequence_files(from, LevelDBClient.LOG_SUFFIX)
if( !log_files.isEmpty ) {
val append_file = log_files.last._2
for( file <- log_files.values ; if file != append_file) {
file.linkTo(to / file.getName)
val crc_file = file.getParentFile / (file.getName+".crc32" )
if( crc_file.exists() ) {
crc_file.linkTo(to / crc_file.getName)
}
}
append_file.copyTo(to / append_file.getName)
}
val index_dirs = LevelDBClient.find_sequence_files(from, LevelDBClient.INDEX_SUFFIX)
if( !index_dirs.isEmpty ) {
val index_file = index_dirs.last._2
var target = to / index_file.getName
target.mkdirs()
LevelDBClient.copyIndex(index_file, target)
}
}
def stash_clear(directory:File) {
val stash = directory / "stash"
stash.recursiveDelete
}
  // Restore a previously stashed copy, if one exists: delete the current store files,
  // copy the stashed files back, then remove the stash.
  def unstash(directory:File) {
val tmp_stash = directory / "stash.tmp"
tmp_stash.recursiveDelete
val stash = directory / "stash"
if( stash.exists() ) {
delete_store(directory)
copy_store_dir(stash, directory)
stash.recursiveDelete
}
}
def delete_store(directory: File) {
// Delete any existing files to make space for the stash we will be restoring..
var t: TreeMap[Long, File] = LevelDBClient.find_sequence_files(directory, LevelDBClient.LOG_SUFFIX)
for (entry <- t) {
val file = entry._2
file.delete()
val crc_file = directory / (file.getName+".crc32" )
if( crc_file.exists() ) {
crc_file.delete()
}
}
for (file <- LevelDBClient.find_sequence_files(directory, LevelDBClient.INDEX_SUFFIX)) {
file._2.recursiveDelete
}
}
}
| chirino/activemq | activemq-leveldb-store/src/main/scala/org/apache/activemq/leveldb/replicated/ReplicationSupport.scala | Scala | apache-2.0 | 4,227 |
package ml.combust.mleap.runtime.frame
import ml.combust.mleap.core.types._
import ml.combust.mleap.runtime.MleapSupport._
import org.scalatest.FunSpec
class LeapFrameConverterSpec extends FunSpec {
describe("LeapFrameConverter") {
val expectedSchema = StructType(Seq(StructField("test_string", ScalarType.String),
StructField("test_double", ScalarType.Double.nonNullable))).get
val frameWith1Row = DefaultLeapFrame(expectedSchema,
Seq(Row("hello", 42.13)))
val frameWithMultipleRows = DefaultLeapFrame(expectedSchema,
Seq(Row("hello", 42.13), Row("mleap", 4.3), Row("world", 1.2)))
it("converts from a case class to a default leap frame with 1 row") {
assert(DummyData("hello", 42.13).toLeapFrame == frameWith1Row)
}
it("creates a Seq with one new instance of a case class from a default leap frame with 1 row") {
assert(frameWith1Row.to[DummyData] == Seq(DummyData("hello", 42.13)))
}
it("converts from a case class to a default leap frame with multiple rows") {
assert(Seq(DummyData("hello", 42.13), DummyData("mleap", 4.3),
DummyData("world", 1.2)).toLeapFrame == frameWithMultipleRows)
}
it("creates a Seq with multiple new instances of a case class from a default leap frame with multiple row") {
assert(frameWithMultipleRows.to[DummyData] ==
Seq(DummyData("hello", 42.13), DummyData("mleap", 4.3), DummyData("world", 1.2)))
}
}
}
case class DummyData(test_string: String, test_double: Double)
| combust/mleap | mleap-runtime/src/test/scala/ml/combust/mleap/runtime/frame/LeapFrameConverterSpec.scala | Scala | apache-2.0 | 1,515 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.planner.calcite.FlinkRelBuilder.PlannerNamedWindowProperty
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.logical.LogicalWindow
import org.apache.flink.table.planner.plan.nodes.exec.batch.BatchExecSortWindowAggregate
import org.apache.flink.table.planner.plan.nodes.exec.{InputProperty, ExecNode}
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.AggregateCall
import java.util
/** Batch physical RelNode for local sort-based window aggregate. */
class BatchPhysicalLocalSortWindowAggregate(
cluster: RelOptCluster,
traitSet: RelTraitSet,
inputRel: RelNode,
outputRowType: RelDataType,
inputRowType: RelDataType,
grouping: Array[Int],
auxGrouping: Array[Int],
aggCallToAggFunction: Seq[(AggregateCall, UserDefinedFunction)],
window: LogicalWindow,
val inputTimeFieldIndex: Int,
inputTimeIsDate: Boolean,
namedWindowProperties: Seq[PlannerNamedWindowProperty],
enableAssignPane: Boolean = false)
extends BatchPhysicalSortWindowAggregateBase(
cluster,
traitSet,
inputRel,
outputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
window,
namedWindowProperties,
enableAssignPane,
isMerge = false,
isFinal = false) {
override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
new BatchPhysicalLocalSortWindowAggregate(
cluster,
traitSet,
inputs.get(0),
outputRowType,
inputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
window,
inputTimeFieldIndex,
inputTimeIsDate,
namedWindowProperties,
enableAssignPane)
}
override def translateToExecNode(): ExecNode[_] = {
new BatchExecSortWindowAggregate(
grouping,
auxGrouping,
getAggCallList.toArray,
window,
inputTimeFieldIndex,
inputTimeIsDate,
namedWindowProperties.toArray,
FlinkTypeFactory.toLogicalRowType(inputRowType),
enableAssignPane,
false, // isMerge is always false
false, // isFinal is always false
InputProperty.DEFAULT,
FlinkTypeFactory.toLogicalRowType(getRowType),
getRelDetailedDescription
)
}
}
| kl0u/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchPhysicalLocalSortWindowAggregate.scala | Scala | apache-2.0 | 3,327 |
package p
class X[T]
trait A {
def m(s:X[_]) {}
}
trait B extends A {
def f { super.m(null) }
}
| loskutov/intellij-scala | testdata/scalacTests/pos/t1896/D0.scala | Scala | apache-2.0 | 103 |
package org.jetbrains.plugins.scala
package annotator.quickfix
import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiFile
import org.jetbrains.plugins.scala.codeInsight.intention.types.AddOnlyStrategy
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
import org.jetbrains.plugins.scala.lang.psi.types.ScType
/**
* Nikolay.Tropin
* 2014-09-23
*/
class AddReturnTypeFix(fun: ScFunctionDefinition, tp: ScType) extends IntentionAction {
override def getText: String = "Add return type"
override def getFamilyName: String = getText
override def invoke(project: Project, editor: Editor, file: PsiFile): Unit = {
new AddOnlyStrategy(Option(editor)).addTypeAnnotation(tp, fun.getParent, fun.parameterList)
}
override def startInWriteAction(): Boolean = true
override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean = fun.returnTypeElement.isEmpty
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/annotator/quickfix/AddReturnTypeFix.scala | Scala | apache-2.0 | 1,039 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
// $example on$
import org.apache.spark.mllib.linalg.Matrix
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.distributed.RowMatrix
// $example off$
object PCAOnRowMatrixExample {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("PCAOnRowMatrixExample")
val sc = new SparkContext(conf)
// $example on$
val data = Array(
Vectors.sparse(5, Seq((1, 1.0), (3, 7.0))),
Vectors.dense(2.0, 0.0, 3.0, 4.0, 5.0),
Vectors.dense(4.0, 0.0, 0.0, 6.0, 7.0))
val dataRDD = sc.parallelize(data, 2)
val mat: RowMatrix = new RowMatrix(dataRDD)
// Compute the top 4 principal components.
// Principal components are stored in a local dense matrix.
val pc: Matrix = mat.computePrincipalComponents(4)
// Project the rows to the linear space spanned by the top 4 principal components.
val projected: RowMatrix = mat.multiply(pc)
// $example off$
val collect = projected.rows.collect()
println("Projected Row Matrix of principal component:")
collect.foreach { vector => println(vector) }
sc.stop()
}
}
// scalastyle:on println
| jianran/spark | examples/src/main/scala/org/apache/spark/examples/mllib/PCAOnRowMatrixExample.scala | Scala | apache-2.0 | 2,100 |
package com.bot4s.telegram.models
/**
* This object represents a chat photo.
*
* @param smallFileId String File identifier of small (160x160) chat photo. This file_id can be used only for photo download.
* @param smallFileUniqueId String File unique identifier of small chat photo.
* @param bigFileId String File identifier of big (640x640) chat photo. This file_id can be used only for photo download.
* @param bigFileUniqueId String File unique identifier of big chat photo.
*/
case class ChatPhoto(
smallFileId: String,
smallFileUniqueId: String,
bigFileId: String,
bigFileUniqueId: String
)
| mukel/telegrambot4s | core/src/com/bot4s/telegram/models/ChatPhoto.scala | Scala | apache-2.0 | 631 |
package nl.woupiestek.midi
import javax.sound.midi._
object OtherSynthesizerWrapper {
implicit object MessageInstance extends MidiMessages[Synthesizer => Unit] {
override def noteOn(channel: Int, pitch: Int, velocity: Int): (Synthesizer) => Unit =
get(channel) andThen (_.noteOn(pitch, velocity))
private def get(index: Int): Synthesizer => MidiChannel = {
_.getChannels()(index)
}
override def noteOff(channel: Int, pitch: Int): (Synthesizer) => Unit =
get(channel) andThen (_.noteOff(pitch))
override def setProgram(channel: Int, program: Int): (Synthesizer) => Unit =
get(channel) andThen (_.programChange(program))
}
}
class OtherSynthesizerWrapper(synthesizer: Synthesizer) {
val million = 1000000l
def play(score: List[(Int, Synthesizer => Unit)]): Unit = {
def await(time: Long): Unit = Some((time - System.nanoTime()) / million).filter(_ > 0).foreach(Thread.sleep)
val ctm = System.nanoTime()
    // convert each event's offset (milliseconds relative to now) into an absolute nanosecond deadline
    val s2 = score.sortBy[Int](_._1).map { case (t, e) => (t * million + ctm, e) }
synthesizer.open()
for ((time, event) <- s2) {
await(time)
event(synthesizer)
}
synthesizer.close()
}
}
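// Illustrative sketch only (not part of the original file): event offsets are treated as
// milliseconds relative to the moment play() is called, so a made-up two-event score could be
//   val score = List(
//     0   -> OtherSynthesizerWrapper.MessageInstance.noteOn(0, 60, 90),
//     500 -> OtherSynthesizerWrapper.MessageInstance.noteOff(0, 60))
//   new OtherSynthesizerWrapper(MidiSystem.getSynthesizer).play(score)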
| woupiestek/midistuff | src/main/scala/nl/woupiestek/midi/OtherSynthesizerWrapper.scala | Scala | mit | 1,189 |
package com.twitter.util
import com.twitter.conversions.time._
import java.util.concurrent.{Future => JFuture, _}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.concurrent.Eventually
import org.scalatest.junit.JUnitRunner
import org.scalatest.time.{Millis, Seconds, Span}
import scala.runtime.NonLocalReturnControl
@RunWith(classOf[JUnitRunner])
class FuturePoolTest extends FunSuite with Eventually {
implicit override val patienceConfig =
PatienceConfig(timeout = scaled(Span(15, Seconds)), interval = scaled(Span(5, Millis)))
test("FuturePool should dispatch to another thread") {
val executor = Executors.newFixedThreadPool(1)
val pool = FuturePool(executor)
val source = new Promise[Int]
val result = pool { Await.result(source) } // simulate blocking call
source.setValue(1)
assert(Await.result(result) == 1)
}
test("Executor failing contains failures") {
val badExecutor = new ScheduledThreadPoolExecutor(1) {
override def submit(runnable: Runnable): JFuture[_] = {
throw new RejectedExecutionException()
}
}
val pool = FuturePool(badExecutor)
val runCount = new atomic.AtomicInteger(0)
val result1 = pool {
runCount.incrementAndGet()
}
Await.ready(result1)
assert(runCount.get() == 0)
}
test("does not execute interrupted tasks") {
val executor = Executors.newFixedThreadPool(1).asInstanceOf[ThreadPoolExecutor]
val pool = FuturePool(executor)
val runCount = new atomic.AtomicInteger
val source1 = new Promise[Int]
val source2 = new Promise[Int]
val result1 = pool { runCount.incrementAndGet(); Await.result(source1) }
val result2 = pool { runCount.incrementAndGet(); Await.result(source2) }
result2.raise(new Exception)
source1.setValue(1)
// The executor will run the task for result 2, but the wrapper
// in FuturePool will throw away the work if the future
// representing the outcome has already been interrupted,
// and will set the result to a CancellationException
eventually { assert(executor.getCompletedTaskCount == 2) }
assert(runCount.get() == 1)
assert(Await.result(result1) == 1)
intercept[CancellationException] { Await.result(result2) }
}
test("continue to run a task if it's interrupted while running") {
val executor = Executors.newFixedThreadPool(1).asInstanceOf[ThreadPoolExecutor]
val pool = FuturePool(executor)
val runCount = new atomic.AtomicInteger
val startedLatch = new CountDownLatch(1)
val cancelledLatch = new CountDownLatch(1)
val result: Future[Int] = pool {
try {
startedLatch.countDown()
runCount.incrementAndGet()
cancelledLatch.await()
throw new RuntimeException()
} finally {
runCount.incrementAndGet()
}
runCount.get
}
startedLatch.await(1.second)
result.raise(new Exception)
cancelledLatch.countDown()
eventually { assert(executor.getCompletedTaskCount == 1) }
assert(runCount.get() == 2)
intercept[RuntimeException] { Await.result(result) }
}
test("returns exceptions that result from submitting a task to the pool") {
val executor = new ThreadPoolExecutor(1, 1, 60, TimeUnit.SECONDS, new LinkedBlockingQueue(1))
val pool = FuturePool(executor)
val source = new Promise[Int]
pool { Await.result(source) } // occupy the thread
pool { Await.result(source) } // fill the queue
val rv = pool { "yay!" }
assert(rv.isDefined)
intercept[RejectedExecutionException] { Await.result(rv) }
source.setValue(1)
}
test("interrupt threads when interruptible") {
val executor = Executors.newFixedThreadPool(1)
val started = new Promise[Unit]
val interrupted = new Promise[Unit]
val ipool = FuturePool.interruptible(executor)
val f = ipool {
try {
started.setDone()
while (true) {
Thread.sleep(Long.MaxValue)
}
} catch { case _: InterruptedException =>
interrupted.setDone()
}
}
Await.result(started)
f.raise(new RuntimeException("foo"))
intercept[RuntimeException] { Await.result(f) }
assert(Await.result(interrupted.liftToTry) == Return(()))
}
test("not interrupt threads when not interruptible") {
val executor = Executors.newFixedThreadPool(1)
val a = new Promise[Unit]
val b = new Promise[Unit]
val nipool = FuturePool(executor)
val f = nipool {
a.setDone()
Await.result(b)
1
}
Await.result(a)
f.raise(new RuntimeException("foo"))
b.setDone()
assert(Await.result(f) == 1)
}
test("satisfies result promise on fatal exceptions thrown by task") {
val executor = Executors.newFixedThreadPool(1)
val pool = FuturePool(executor)
val fatal = new LinkageError
assert(!NonFatal.isNonFatal(fatal))
val rv = pool { throw fatal }
val ex = intercept[ExecutionException] { Await.result(rv) }
assert(ex.getCause == fatal)
}
class PoolCtx {
val executor = Executors.newFixedThreadPool(1)
val pool = FuturePool(executor)
val pools = Seq(FuturePool.immediatePool, pool)
}
test("handles NonLocalReturnControl properly") {
val ctx = new PoolCtx
import ctx._
def fake(): String = {
pools foreach { pool =>
val rv = pool { return "OK" }
val e = intercept[FutureNonLocalReturnControl] { Await.result(rv) }
val f = intercept[NonLocalReturnControl[String]] { throw e.getCause }
assert(f.value == "OK")
}
"FINISHED"
}
assert(fake() == "FINISHED")
}
test("FuturePool metrics") {
// We want isolation and thus can't use FuturePool.unboundedPool
// But we want to make sure it will have the correct behavior.
// We compromise by roughly creating an ExecutorService/FuturePool
// that behaves the same.
val executor = Executors.newCachedThreadPool()
val pool = new ExecutorServiceFuturePool(executor)
// verify the initial state
assert(pool.poolSize == 0)
assert(pool.numActiveTasks == 0)
assert(pool.numCompletedTasks == 0)
// execute a task we can control
val latch = new CountDownLatch(1)
val future = pool {
latch.await(10.seconds)
true
}
assert(pool.poolSize == 1)
assert(pool.numActiveTasks == 1)
assert(pool.numCompletedTasks == 0)
// let the task complete
latch.countDown()
Await.ready(future, 5.seconds)
assert(pool.poolSize == 1)
assert(pool.numActiveTasks == 0)
assert(pool.numCompletedTasks == 1)
// cleanup.
executor.shutdown()
}
}
| folone/util | util-core/src/test/scala/com/twitter/util/FuturePoolTest.scala | Scala | apache-2.0 | 6,670 |
package avrohugger.filesorter
import java.io.File
import scala.annotation.tailrec
import scala.io.Source
/**
 * The order in which avdl files are compiled depends on the underlying file
 * system (under OS X it is alphabetical, under some Linux distros it's not).
 * This is an issue when you have a record type that is used in several
 * other types. This sorter ensures that dependent types are compiled in the
 * correct order.
* Created by Jon Morra on 2/7/17.
*/
object AvdlFileSorter {
def sortSchemaFiles(filesIterable: Iterable[File]): Seq[File] = {
val files = filesIterable.toList
val importsMap = files.map{ file =>
(file.getCanonicalFile, getImports(file))
}.toMap.mapValues(f => f.filter(_.exists))
@tailrec def addFiles(processedFiles: Seq[File], remainingFiles: List[File]): Seq[File] = {
remainingFiles match {
case Nil => processedFiles
case h :: t =>
val processedFilesSet = processedFiles.toSet
if (importsMap(h).forall(processedFilesSet.contains))
addFiles(processedFiles :+ h, t)
else
addFiles(processedFiles, t :+ h)
}
}
val result = addFiles(Seq.empty, files)
result
}
// TODO This should be replaced by letting AVRO compile the IDL files directly, but I'm not sure how to do that now.
private[this] val importPattern = """\\s*import\\s+idl\\s+"([^"]+)"\\s*;\\s*""".r
private[this] def getImports(file: File): Vector[File] = {
val source = Source.fromFile(file)
try {
source.getLines().collect{
case importPattern(currentImport) => new File(file.getParentFile, currentImport).getCanonicalFile
}.toVector
}
finally source.close()
}
}
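// Illustrative usage only (not part of the original file); the .avdl paths are made up:
//   val ordered = AvdlFileSorter.sortSchemaFiles(Seq(new File("schemas/order.avdl"), new File("schemas/customer.avdl")))
//   // files whose "import idl" dependencies are part of the input come after those dependencies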
| julianpeeters/avrohugger | avrohugger-filesorter/src/main/scala/com/julianpeeters/avrohugger/filesorter/AvdlFileSorter.scala | Scala | apache-2.0 | 1,725 |
package com.arcusys.learn.models
import com.arcusys.valamis.questionbank.model.{ Question, Answer }
case class QuizResponse(id: Int, title: String, description: String, logo: String, size: Int, maxDuration: Option[Int])
case class QuizPublishStatusResponse(status: Boolean)
sealed trait QuizContentResponse {
def id: String
def contentType: String
def title: String
def arrangementIndex: Int
def lessonId: Int
}
case class QuizCategoryResponse(
id: String,
lessonId: Int,
title: String,
arrangementIndex: Int = 1,
children: Seq[QuizQuestionResponse],
contentType: String = "category") extends QuizContentResponse {
}
sealed trait QuizQuestionResponse extends QuizContentResponse {
}
case class QuizQuestionExternalResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
url: String,
arrangementIndex: Int = 1,
contentType: String = "questionExternalResource") extends QuizQuestionResponse
case class QuizQuestionPlainTextResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
text: String,
arrangementIndex: Int = 1,
contentType: String = "questionPlainText") extends QuizQuestionResponse
case class QuizQuestionBankResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
question: Question[Answer],
autoShowAnswer: Boolean,
arrangementIndex: Int = 1,
questionTypeCode: Int,
contentType: String = "question") extends QuizQuestionResponse
case class QuizQuestionRevealJSResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
text: String,
arrangementIndex: Int = 1,
contentType: String = "questionRevealJS") extends QuizQuestionResponse
case class QuizQuestionPDFResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
filename: String,
arrangementIndex: Int = 1,
contentType: String = "questionPDF") extends QuizQuestionResponse
case class QuizQuestionPPTXResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
arrangementIndex: Int = 1,
contentType: String = "questionPPTX") extends QuizQuestionResponse
case class QuizQuestionVideoDLResponse(
id: String,
lessonId: Int,
categoryID: Option[Int],
title: String,
uuid: String,
arrangementIndex: Int = 1,
contentType: String = "questionVideoDL") extends QuizQuestionResponse
abstract class QuizQuestionPreview
case class QuizQuestionPreviewContent(content: String) extends QuizQuestionPreview
case class QuizQuestionPreviewRedirect(url: String) extends QuizQuestionPreview | icacic/Valamis | learn-portlet/src/main/scala/com/arcusys/learn/models/QuizResponse.scala | Scala | gpl-3.0 | 2,597 |
package com.dy.templates.steps
import org.jbehave.core.annotations.{Then, When, Given}
class TestSteps {
@Given("system in default state")
def systemDefaultState() {
// Thread.sleep(5000)
println("System is in default state")
}
@When("I do something")
def userAction() {
println("Uses made some action")
}
@Then("system is in a different state")
def changeSystemState() {
println("System's state changed")
}
}
| Linosh/mvn-jbehave-webdriver-grid | src/test/scala/com/dy/templates/steps/TestSteps.scala | Scala | apache-2.0 | 448 |
package no.digipost.labs.items
import org.scalatra.test.scalatest._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization
import no.digipost.labs.security.Headers
import Headers.X_CSRF_Token
import org.scalatra.test.HttpComponentsClient
import org.bson.types.ObjectId
import no.digipost.labs.users.SessionHelper
import SessionHelper._
import java.util.Date
import no.digipost.labs.security.Headers
import no.digipost.labs.errorhandling.Error
class ItemsResourceTest extends ScalatraFunSuite {
val itemsRepo = new TestItemsRepository
private val itemWithBody = DbItem(new ObjectId(), ItemType.news, new Date, author = "Test Testesen", body = "body", source = Some("Original source"))
itemsRepo.insert(itemWithBody)
private val itemWithoutSource = DbItem(new ObjectId(), ItemType.news, new Date, author = "Test Testesen", body = "body", source = None)
itemsRepo.insert(itemWithoutSource)
addServlet(new ItemsResource(new ItemsService(itemsRepo)), "/*")
addServlet(sessionServletWithMocks, "/sessions/*")
val validIdea = IdeaInput("Great idea everyone!", "Something smart and something funny", None)
val validNews = NewsInput("Good news everyone!", "Today you'll be delivering a crate of subpoenas to Sicily 8, the Mob Planet!")
test("get items") {
get("/items") {
status should equal (200)
val items = parse(body).extract[Items]
assert(items.items.size === 2)
}
}
test("should have no-cache and security headers") {
get("/items") {
assert(response.headers(Headers.CacheControl) === List("no-cache, no-store, no-transform"))
assert(response.headers(Headers.StrictTransportSecurity) === List("max-age=31536000"))
assert(response.headers(Headers.XContentTypeOptions) === List("nosniff"))
assert(response.headers(Headers.XFrameOptions) === List("deny"))
assert(response.headers(Headers.XPermittedCrossDomainPolicies) === List("master-only"))
assert(response.headers(Headers.XXSSProtection) === List("1; mode=block"))
}
}
test("get a single item by id") {
get(s"/items/${itemWithBody._id}") {
status should equal (200)
val item = parse(body).extract[Item]
assert(item.id === itemWithBody._id.toHexString)
assert(item.source === None)
}
}
test("get a single item by id not accessible for non admin") {
get(s"/items/${itemWithBody._id}/editable") {
status should equal (403)
}
}
test("get a single item by id for editing by admin") {
session {
SessionHelper.loginUser(this, admin = true)
get(s"/items/${itemWithBody._id}/editable") {
status should equal (200)
val item = parse(body).extract[Item]
assert(item.id === itemWithBody._id.toHexString)
assert(item.source === Some("Original source"))
}
}
}
test("items without source should use body when editing") {
session {
SessionHelper.loginUser(this, admin = true)
get(s"/items/${itemWithoutSource._id}/editable") {
status should equal (200)
val item = parse(body).extract[Item]
assert(item.id === itemWithoutSource._id.toHexString)
assert(item.body === "body")
assert(item.source === Some("body"))
}
}
}
test("get items of type") {
get("/items/type/news") {
status should equal (200)
val items = parse(body).extract[Items]
assert(items.items.size === 2)
}
}
test("no access when not logged in") {
post("/ideas", body = Serialization.write(validIdea), headers = Map("Content-Type" -> "application/json")) {
assert(status === 403)
}
}
test("should create new item when logged in") {
session {
val csrfToken = loginUserAndGetCsrfToken(this)
createNewIdea(this, validIdea, csrfToken)
}
}
test("should not create idea when not admin and status is set") {
session {
val idea = IdeaInput("Great idea everyone!", "Something smart and something funny", Some(Status.Closed.toString))
post("/ideas", Serialization.write(idea).getBytes, Map("Content-Type" -> "application/json", X_CSRF_Token -> loginUserAndGetCsrfToken(this))) {
assert(status === 403)
}
}
}
test("should create idea when admin and status is set") {
session {
val idea = IdeaInput("Great idea everyone!", "Something smart and something funny", Some(Status.Closed.toString))
val newIdea = createNewIdea(this, idea, loginUserAndGetCsrfToken(this, admin = true))
assert(newIdea.status.get === Status.Closed.toString)
}
}
test("should not create news when not admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = false)
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val news = NewsInput("news", "body")
post("/news", Serialization.write(news).getBytes, headers) {
status should be(403)
}
}
}
test("should update news when admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val item = createNews(this, NewsInput("Breaking news", "Long story short"), csrfToken)
val changed = NewsInput("changed title", "changed body")
post(s"/news/${item.id}", Serialization.write(changed), headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)) {
status should be(200)
val updated = parse(body).extract[Item]
updated.id should equal(item.id)
updated.title should equal(Some("changed title"))
updated.source should equal(Some("changed body"))
updated.body should equal("<p>changed body</p>")
}
}
}
test("should create tweet when admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val tweet = TweetInput("https://twitter.com", "@froden", "Digipost er best, ingen protest!")
post("/tweets", Serialization.write(tweet).getBytes, headers) {
status should be(201)
val item = parse(body).extract[Item]
item.url should be(Some("https://twitter.com"))
item.author.name should be("@froden")
}
}
}
test("should not be able to delete when not admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = false)
val item = createNewIdea(this, validIdea, csrfToken)
delete(s"/items/${item.id}", headers = Map(X_CSRF_Token -> csrfToken)) {
status should be(403)
}
}
}
test("should delete news when admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val item = createNewIdea(this, validIdea, csrfToken)
delete(s"/items/${item.id}", headers = Map(X_CSRF_Token -> csrfToken)) {
status should be(204)
}
get(s"/items/${item.id}") {
status should be(404)
}
}
}
test("should not create item when missing csrf-token") {
session {
loginUser(this)
post("/items", body = Serialization.write(validIdea), headers = Map("Content-Type" -> "application/json")) {
assert(status === 403)
assert(response.body.contains("Missing " + X_CSRF_Token))
}
}
}
test("should not create item when invalid csrf-token") {
session {
loginUser(this)
post("/ideas", body = Serialization.write(validIdea), headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> "invalid")) {
assert(status === 403)
assert(response.body.contains("Invalid " + X_CSRF_Token))
}
}
}
test("should comment on item") {
session {
val csrfToken = loginUserAndGetCsrfToken(this)
val item = createNewIdea(this, validIdea, csrfToken)
createNewComment(this, item.id, "Hei på deg", csrfToken, 201)
assert(getItem(this, item.id).comments.size === 1)
}
}
test("no empty comments") {
session {
val csrfToken = loginUserAndGetCsrfToken(this)
val item = createNewIdea(this, validIdea, csrfToken)
createNewComment(this, item.id, "", csrfToken, 400)
}
}
test("should list latest comments for admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val idea = createNewIdea(this, validIdea, csrfToken)
val news = createNews(this, validNews, csrfToken)
createNews(this, NewsInput("This just in", "News without comments"), csrfToken)
createNewComment(this, idea.id, "Kommentar1", csrfToken, 201)
createNewComment(this, news.id, "Kommentar2", csrfToken, 201)
createNewComment(this, news.id, "Kommentar3", csrfToken, 201)
createNewComment(this, idea.id, "Kommentar4", csrfToken, 201)
createNewComment(this, news.id, "Kommentar5", csrfToken, 201)
createNewComment(this, news.id, "Kommentar6", csrfToken, 201)
get("/comments", headers = Map(X_CSRF_Token -> csrfToken)) {
assert(status === 200)
val comments = parse(body).camelizeKeys.extract[List[Comment]]
comments.size should be >= 6
comments.head.body should include ("Kommentar6")
comments.head.itemId should be(news.id)
}
}
}
test("should not list comments when user or not logged in") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = false)
get("/comments", headers = Map(X_CSRF_Token -> csrfToken)) {
assert(status === 403)
}
}
get("/comments") {
assert(status === 403)
}
}
test("should delete a comment when admin") {
session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val item = createNewIdea(this, validIdea, csrfToken)
createNewComment(this, item.id, "En kommentar", csrfToken, 201)
createNewComment(this, item.id, "En kommentar til", csrfToken, 201)
createNewComment(this, item.id, "Enda en kommentar", csrfToken, 201)
val itemWithComments = getItem(this, item.id)
assert(itemWithComments.comments.size === 3)
val commentToDelete = itemWithComments.comments.head
delete(s"/items/${item.id}/comments/${commentToDelete.id}", headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)) {
assert(status === 200)
val item = parse(body).extract[Item]
assert(item.comments.size === 2)
}
assert(getItem(this, item.id).comments.size === 2)
}
}
test("should not delete a comment when not admin") {
val item = session {
val csrfToken = loginUserAndGetCsrfToken(this, admin = true)
val item = createNewIdea(this, validIdea, csrfToken)
createNewComment(this, item.id, "En kommentar", csrfToken, 201)
createNewComment(this, item.id, "En kommentar til", csrfToken, 201)
createNewComment(this, item.id, "Enda en kommentar", csrfToken, 201)
item
}
val itemWithComments = getItem(this, item.id)
assert(itemWithComments.comments.size === 3)
val commentToDelete = itemWithComments.comments.head
delete(s"/items/${item.id}/comments/${commentToDelete.id}", headers = Map("Content-Type" -> "application/json")) {
assert(status === 403)
}
assert(getItem(this, item.id).comments.size === 3)
}
test("should vote on item") {
session {
val csrfToken = loginUserAndGetCsrfToken(this)
val item = createNewIdea(this, validIdea, csrfToken)
assert(item.votes === 0)
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val resultItem = post(s"/items/${item.id}/votes", params = Nil, headers = headers) {
assert(status === 200)
parse(body).extract[Item]
}
assert(resultItem.votes === 1)
assert(resultItem.voted)
}
}
test("should not be able to vote when not logged in") {
val itemId = session {
val csrfToken = loginUserAndGetCsrfToken(this)
val item = createNewIdea(this, validIdea, csrfToken)
item.id
}
post(s"/items/$itemId/votes", params = Nil, headers = Map("Content-Type" -> "application/json")) {
assert(status === 403)
}
}
test("Invalid routes should give 404 not found") {
get("/invalid") {
assertNotFound()
}
get("invalid") {
assertNotFound()
}
get("/") {
assertNotFound()
}
get("") {
assertNotFound()
}
def assertNotFound() = {
assert(status === 404)
assert(parse(body).camelizeKeys.extract[Error] === Error("Not found"))
}
}
def getItem(client: HttpComponentsClient, itemId: String): Item = {
client.get(s"/items/$itemId") {
assert(client.status === 200)
parse(client.body).extract[Item]
}
}
def createNewIdea(client: HttpComponentsClient, idea: IdeaInput, csrfToken: String): Item = {
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val created = client.post("/ideas", Serialization.write(idea).getBytes, headers) {
assert(client.status === 201)
parse(client.body).extract[Item]
}
getItem(client, created.id)
}
def createNews(client: HttpComponentsClient, news: NewsInput, csrfToken: String): Item = {
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val created = client.post("/ideas", Serialization.write(news).getBytes, headers) {
assert(client.status === 201)
parse(client.body).extract[Item]
}
getItem(client, created.id)
}
def createNewComment(client: HttpComponentsClient, itemId: String, body: String, csrfToken: String, expectedStatus: Int) {
val item = getItem(client, itemId)
val headers = Map("Content-Type" -> "application/json", X_CSRF_Token -> csrfToken)
val comment = CommentInput(body = body)
client.post(s"/items/$itemId/comments", Serialization.write(comment).getBytes, headers) {
assert(client.status === expectedStatus)
expectedStatus match {
case 201 => {
val updatedItem = parse(client.body).extract[Item]
assert(updatedItem.comments.size === (item.comments.size + 1))
}
case _ => parse(client.body).camelizeKeys.extract[Error]
}
}
}
} | digipost/labs | backend/src/test/scala/no/digipost/labs/items/ItemsResourceTest.scala | Scala | apache-2.0 | 14,278 |
package edu.rice.habanero.actors
import java.util.concurrent.ForkJoinPool
import java.util.concurrent.ForkJoinPool.ManagedBlocker
import java.util.concurrent.atomic.AtomicBoolean
import edu.rice.hj.runtime.config.HjSystemProperty
import edu.rice.hj.runtime.util.ModCountDownLatch
import org.jetlang.core.{BatchExecutor, EventReader}
import org.jetlang.fibers.{Fiber, PoolFiberFactory}
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Promise}
/**
* ORIGINAL SOURCE: http://code.google.com/p/jetlang/source/browse/scala/src/jetlang/example/JetlangActor.scala
* March 16, 2012
*
* Note:
* - Fixed ActorExecutor signature to use execute(EventReader) instead of execute(Array[Runnable])
* - allow configurable pool size using system property: actors.corePoolSize
* - add start() and exit() to JetlangActor
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> ([email protected])
*/
object JetlangActorState {
val actorLatch = new ModCountDownLatch(0)
val current = new ThreadLocal[ReplyTo[_]]()
def awaitTermination() {
try {
actorLatch.await()
} catch {
case ex: InterruptedException => {
ex.printStackTrace()
}
}
}
}
class ReplyTo[MsgType](actor: MsgType => Unit) {
def !(msg: MsgType): Unit = actor(msg)
}
class ActorExecutor[MsgType](actor: MsgType => Unit) extends BatchExecutor {
val replyTo = new ReplyTo[MsgType](actor)
def execute(eventReader: EventReader) = {
JetlangActorState.current.set(replyTo)
for (index <- 0 to eventReader.size() - 1)
eventReader.get(index).run()
JetlangActorState.current.set(null)
}
}
object JetlangPool {
val executors = {
val workers: Int = HjSystemProperty.numWorkers.getPropertyValue.toInt
new ForkJoinPool(workers)
}
private val fiberFactory = new PoolFiberFactory(executors)
def create[MsgType](callback: MsgType => Unit): Fiber = {
val e = new ActorExecutor[MsgType](callback)
fiberFactory.create(e)
}
def await[A](aPromise: Promise[A]): A = {
val blocker = new ManagedBlocker {
override def block(): Boolean = {
Await.result(aPromise.future, Duration.Inf)
true
}
override def isReleasable(): Boolean = {
aPromise.isCompleted
}
}
ForkJoinPool.managedBlock(blocker)
val res = Await.result(aPromise.future, Duration.Inf)
res
}
def shutdown(): Unit = executors.shutdown()
}
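// Illustrative sketch only (not part of the original file); the file name and length are made up:
//   val buf = ReplicationSupport.map(new File("0000000000000000.log"), 0, 1024, readOnly = true)
//   try { /* read from buf */ } finally { ReplicationSupport.unmap(buf) }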
abstract class JetlangActor[MsgType] {
private val startTracker = new AtomicBoolean(false)
private val exitTracker = new AtomicBoolean(false)
val fiber = createFiber(receiveMsg)
var sender: ReplyTo[MsgType] = null
def createFiber(callback: MsgType => Unit): Fiber = JetlangPool.create(callback)
final def receiveMsg(msg: MsgType): Unit = {
val runner = new Runnable() {
def run() = {
process(msg)
}
}
try {
fiber.execute(runner)
} catch {
case th: Throwable =>
th.printStackTrace(System.err)
System.err.flush()
th.getCause
}
}
final def !(msg: MsgType): Unit = receiveMsg(msg)
final def send(msg: MsgType): Unit = receiveMsg(msg)
def process(msg: MsgType): Unit
final def hasStarted() = {
startTracker.get()
}
final def start() = {
if (!hasStarted()) {
JetlangActorState.actorLatch.updateCount()
onPreStart()
fiber.start()
onPostStart()
startTracker.set(true)
}
}
/**
* Convenience: specify code to be executed before actor is started
*/
protected def onPreStart() = {
}
/**
* Convenience: specify code to be executed after actor is started
*/
protected def onPostStart() = {
}
final def hasExited() = {
exitTracker.get()
}
final def exit() = {
val success = exitTracker.compareAndSet(false, true)
if (success) {
onPreExit()
fiber.dispose()
onPostExit()
JetlangActorState.actorLatch.countDown()
}
}
/**
* Convenience: specify code to be executed before actor is terminated
*/
protected def onPreExit() = {
}
/**
* Convenience: specify code to be executed after actor is terminated
*/
protected def onPostExit() = {
}
}
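// Illustrative sketch only (not part of the original file): a minimal actor built on this
// base class; the message type and body are made up for the example.
//   class Printer extends JetlangActor[String] {
//     override def process(msg: String): Unit = if (msg == "stop") exit() else println(msg)
//   }
//   val a = new Printer; a.start(); a.send("hello"); a.send("stop")
//   JetlangActorState.awaitTermination()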
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/actors/JetlangActor.scala | Scala | gpl-2.0 | 4,218 |
package beamly.core.lang.extensions
import beamly.core.lang.TryToFuture
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
final class TryW[T](val underlying: Try[T]) extends AnyVal {
/**
* Converts [[scala.util.Try]] to [[scala.concurrent.Future]]
* @return Future from Try
*/
@inline
def future: Future[T] = TryToFuture autoTryToFuture underlying
/**
* Returns successful value from underlying [[scala.util.Try]] or attempts to convert exception to value.
* @param pf Partial function to convert exceptions to a value
* @tparam U type of return value
* @return Underlying value or resulting value after converting exception
*/
@inline
def getOrRecover[U >: T](pf: => PartialFunction[Throwable, U]): U = underlying match {
case Success(s) => s
case Failure(e) => pf.applyOrElse(e, throw (_: Throwable))
}
}
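// Illustrative usage only (not part of the original file); assumes the library's implicit
// conversion from Try to this wrapper is in scope:
//   Try("42".toInt).getOrRecover { case _: NumberFormatException => 0 }   // 42
//   Try("x".toInt).getOrRecover { case _: NumberFormatException => 0 }    // 0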
| beamly/beamly.core.lang | src/main/scala/beamly/core/lang/extensions/TryW.scala | Scala | apache-2.0 | 880 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import kafka.utils._
import kafka.message._
import kafka.common._
import kafka.metrics.KafkaMetricsGroup
import kafka.server.{LogOffsetMetadata, FetchDataInfo, BrokerTopicStats}
import java.io.{IOException, File}
import java.util.concurrent.{ConcurrentNavigableMap, ConcurrentSkipListMap}
import java.util.concurrent.atomic._
import java.text.NumberFormat
import scala.collection.JavaConversions
import com.yammer.metrics.core.Gauge
object LogAppendInfo {
val UnknownLogAppendInfo = LogAppendInfo(-1, -1, NoCompressionCodec, NoCompressionCodec, -1, -1, false)
}
/**
* Struct to hold various quantities we compute about each message set before appending to the log
* @param firstOffset The first offset in the message set
* @param lastOffset The last offset in the message set
* @param shallowCount The number of shallow messages
* @param validBytes The number of valid bytes
 * @param sourceCodec The source codec used in the message set (sent by the producer)
 * @param targetCodec The target codec of the message set (after applying the broker compression configuration if any)
* @param offsetsMonotonic Are the offsets in this message set monotonically increasing
*/
case class LogAppendInfo(var firstOffset: Long, var lastOffset: Long, sourceCodec: CompressionCodec, targetCodec: CompressionCodec, shallowCount: Int, validBytes: Int, offsetsMonotonic: Boolean)
/**
* An append-only log for storing messages.
*
* The log is a sequence of LogSegments, each with a base offset denoting the first message in the segment.
*
* New log segments are created according to a configurable policy that controls the size in bytes or time interval
* for a given segment.
*
* @param dir The directory in which log segments are created.
* @param config The log configuration settings
* @param recoveryPoint The offset at which to begin recovery--i.e. the first offset which has not been flushed to disk
* @param scheduler The thread pool scheduler used for background actions
* @param time The time instance used for checking the clock
*
*/
@threadsafe
class Log(val dir: File,
@volatile var config: LogConfig,
@volatile var recoveryPoint: Long = 0L,
scheduler: Scheduler,
time: Time = SystemTime) extends Logging with KafkaMetricsGroup {
import kafka.log.Log._
/* A lock that guards all modifications to the log */
private val lock = new Object
/* last time it was flushed */
private val lastflushedTime = new AtomicLong(time.milliseconds)
def initFileSize() : Int = {
if (config.preallocate)
config.segmentSize
else
0
}
/* the actual segments of the log */
private val segments: ConcurrentNavigableMap[java.lang.Long, LogSegment] = new ConcurrentSkipListMap[java.lang.Long, LogSegment]
loadSegments()
/* Calculate the offset of the next message */
@volatile var nextOffsetMetadata = new LogOffsetMetadata(activeSegment.nextOffset(), activeSegment.baseOffset, activeSegment.size.toInt)
val topicAndPartition: TopicAndPartition = Log.parseTopicPartitionName(dir)
info("Completed load of log %s with log end offset %d".format(name, logEndOffset))
val tags = Map("topic" -> topicAndPartition.topic, "partition" -> topicAndPartition.partition.toString)
newGauge("NumLogSegments",
new Gauge[Int] {
def value = numberOfSegments
},
tags)
newGauge("LogStartOffset",
new Gauge[Long] {
def value = logStartOffset
},
tags)
newGauge("LogEndOffset",
new Gauge[Long] {
def value = logEndOffset
},
tags)
newGauge("Size",
new Gauge[Long] {
def value = size
},
tags)
/** The name of this log */
def name = dir.getName()
/* Load the log segments from the log files on disk */
private def loadSegments() {
// create the log directory if it doesn't exist
dir.mkdirs()
var swapFiles = Set[File]()
// first do a pass through the files in the log directory and remove any temporary files
// and find any interrupted swap operations
for(file <- dir.listFiles if file.isFile) {
if(!file.canRead)
throw new IOException("Could not read file " + file)
val filename = file.getName
if(filename.endsWith(DeletedFileSuffix) || filename.endsWith(CleanedFileSuffix)) {
// if the file ends in .deleted or .cleaned, delete it
file.delete()
} else if(filename.endsWith(SwapFileSuffix)) {
// we crashed in the middle of a swap operation, to recover:
// if a log, delete the .index file, complete the swap operation later
// if an index just delete it, it will be rebuilt
val baseName = new File(CoreUtils.replaceSuffix(file.getPath, SwapFileSuffix, ""))
if(baseName.getPath.endsWith(IndexFileSuffix)) {
file.delete()
} else if(baseName.getPath.endsWith(LogFileSuffix)){
// delete the index
val index = new File(CoreUtils.replaceSuffix(baseName.getPath, LogFileSuffix, IndexFileSuffix))
index.delete()
swapFiles += file
}
}
}
// now do a second pass and load all the .log and .index files
for(file <- dir.listFiles if file.isFile) {
val filename = file.getName
if(filename.endsWith(IndexFileSuffix)) {
// if it is an index file, make sure it has a corresponding .log file
val logFile = new File(file.getAbsolutePath.replace(IndexFileSuffix, LogFileSuffix))
if(!logFile.exists) {
warn("Found an orphaned index file, %s, with no corresponding log file.".format(file.getAbsolutePath))
file.delete()
}
} else if(filename.endsWith(LogFileSuffix)) {
        // if it's a log file, load the corresponding log segment
val start = filename.substring(0, filename.length - LogFileSuffix.length).toLong
val indexFile = Log.indexFilename(dir, start)
val segment = new LogSegment(dir = dir,
startOffset = start,
indexIntervalBytes = config.indexInterval,
maxIndexSize = config.maxIndexSize,
rollJitterMs = config.randomSegmentJitter,
time = time,
fileAlreadyExists = true)
if(indexFile.exists()) {
try {
segment.index.sanityCheck()
} catch {
case e: java.lang.IllegalArgumentException =>
warn("Found an corrupted index file, %s, deleting and rebuilding index...".format(indexFile.getAbsolutePath))
indexFile.delete()
segment.recover(config.maxMessageSize)
}
}
else {
error("Could not find index file corresponding to log file %s, rebuilding index...".format(segment.log.file.getAbsolutePath))
segment.recover(config.maxMessageSize)
}
segments.put(start, segment)
}
}
// Finally, complete any interrupted swap operations. To be crash-safe,
// log files that are replaced by the swap segment should be renamed to .deleted
// before the swap file is restored as the new segment file.
for (swapFile <- swapFiles) {
val logFile = new File(CoreUtils.replaceSuffix(swapFile.getPath, SwapFileSuffix, ""))
val fileName = logFile.getName
val startOffset = fileName.substring(0, fileName.length - LogFileSuffix.length).toLong
val indexFile = new File(CoreUtils.replaceSuffix(logFile.getPath, LogFileSuffix, IndexFileSuffix) + SwapFileSuffix)
val index = new OffsetIndex(file = indexFile, baseOffset = startOffset, maxIndexSize = config.maxIndexSize)
val swapSegment = new LogSegment(new FileMessageSet(file = swapFile),
index = index,
baseOffset = startOffset,
indexIntervalBytes = config.indexInterval,
rollJitterMs = config.randomSegmentJitter,
time = time)
info("Found log file %s from interrupted swap operation, repairing.".format(swapFile.getPath))
swapSegment.recover(config.maxMessageSize)
val oldSegments = logSegments(swapSegment.baseOffset, swapSegment.nextOffset)
replaceSegments(swapSegment, oldSegments.toSeq, isRecoveredSwapFile = true)
}
if(logSegments.size == 0) {
// no existing segments, create a new mutable segment beginning at offset 0
segments.put(0L, new LogSegment(dir = dir,
startOffset = 0,
indexIntervalBytes = config.indexInterval,
maxIndexSize = config.maxIndexSize,
rollJitterMs = config.randomSegmentJitter,
time = time,
fileAlreadyExists = false,
initFileSize = this.initFileSize(),
preallocate = config.preallocate))
} else {
recoverLog()
// reset the index size of the currently active log segment to allow more entries
activeSegment.index.resize(config.maxIndexSize)
}
}
private def updateLogEndOffset(messageOffset: Long) {
nextOffsetMetadata = new LogOffsetMetadata(messageOffset, activeSegment.baseOffset, activeSegment.size.toInt)
}
private def recoverLog() {
// if we have the clean shutdown marker, skip recovery
if(hasCleanShutdownFile) {
this.recoveryPoint = activeSegment.nextOffset
return
}
    // okay, we need to actually recover this log
val unflushed = logSegments(this.recoveryPoint, Long.MaxValue).iterator
while(unflushed.hasNext) {
val curr = unflushed.next
info("Recovering unflushed segment %d in log %s.".format(curr.baseOffset, name))
val truncatedBytes =
try {
curr.recover(config.maxMessageSize)
} catch {
case e: InvalidOffsetException =>
val startOffset = curr.baseOffset
warn("Found invalid offset during recovery for log " + dir.getName +". Deleting the corrupt segment and " +
"creating an empty one with starting offset " + startOffset)
curr.truncateTo(startOffset)
}
if(truncatedBytes > 0) {
        // we had an invalid message, delete all remaining log segments
warn("Corruption found in segment %d of log %s, truncating to offset %d.".format(curr.baseOffset, name, curr.nextOffset))
unflushed.foreach(deleteSegment)
}
}
}
/**
* Check if we have the "clean shutdown" file
*/
private def hasCleanShutdownFile() = new File(dir.getParentFile, CleanShutdownFile).exists()
/**
* The number of segments in the log.
* Take care! this is an O(n) operation.
*/
def numberOfSegments: Int = segments.size
/**
* Close this log
*/
def close() {
debug("Closing log " + name)
lock synchronized {
for(seg <- logSegments)
seg.close()
}
}
/**
* Append this message set to the active segment of the log, rolling over to a fresh segment if necessary.
*
* This method will generally be responsible for assigning offsets to the messages,
* however if the assignOffsets=false flag is passed we will only check that the existing offsets are valid.
*
* @param messages The message set to append
* @param assignOffsets Should the log assign offsets to this message set or blindly apply what it is given
*
* @throws KafkaStorageException If the append fails due to an I/O error.
*
* @return Information about the appended messages including the first and last offset.
*/
def append(messages: ByteBufferMessageSet, assignOffsets: Boolean = true): LogAppendInfo = {
val appendInfo = analyzeAndValidateMessageSet(messages)
// if we have any valid messages, append them to the log
if(appendInfo.shallowCount == 0)
return appendInfo
// trim any invalid bytes or partial messages before appending it to the on-disk log
var validMessages = trimInvalidBytes(messages, appendInfo)
try {
// they are valid, insert them in the log
lock synchronized {
appendInfo.firstOffset = nextOffsetMetadata.messageOffset
if(assignOffsets) {
// assign offsets to the message set
val offset = new AtomicLong(nextOffsetMetadata.messageOffset)
try {
validMessages = validMessages.validateMessagesAndAssignOffsets(offset, appendInfo.sourceCodec, appendInfo.targetCodec, config.compact)
} catch {
case e: IOException => throw new KafkaException("Error in validating messages while appending to log '%s'".format(name), e)
}
appendInfo.lastOffset = offset.get - 1
} else {
// we are taking the offsets we are given
if(!appendInfo.offsetsMonotonic || appendInfo.firstOffset < nextOffsetMetadata.messageOffset)
throw new IllegalArgumentException("Out of order offsets found in " + messages)
}
// re-validate message sizes since after re-compression some may exceed the limit
for(messageAndOffset <- validMessages.shallowIterator) {
if(MessageSet.entrySize(messageAndOffset.message) > config.maxMessageSize) {
// we record the original message set size instead of trimmed size
// to be consistent with pre-compression bytesRejectedRate recording
BrokerTopicStats.getBrokerTopicStats(topicAndPartition.topic).bytesRejectedRate.mark(messages.sizeInBytes)
BrokerTopicStats.getBrokerAllTopicsStats.bytesRejectedRate.mark(messages.sizeInBytes)
throw new MessageSizeTooLargeException("Message size is %d bytes which exceeds the maximum configured message size of %d."
.format(MessageSet.entrySize(messageAndOffset.message), config.maxMessageSize))
}
}
// check that the message set size does not exceed config.segmentSize
if(validMessages.sizeInBytes > config.segmentSize) {
throw new MessageSetSizeTooLargeException("Message set size is %d bytes which exceeds the maximum configured segment size of %d."
.format(validMessages.sizeInBytes, config.segmentSize))
}
// maybe roll the log if this segment is full
val segment = maybeRoll(validMessages.sizeInBytes)
// now append to the log
segment.append(appendInfo.firstOffset, validMessages)
// increment the log end offset
updateLogEndOffset(appendInfo.lastOffset + 1)
trace("Appended message set to log %s with first offset: %d, next offset: %d, and messages: %s"
.format(this.name, appendInfo.firstOffset, nextOffsetMetadata.messageOffset, validMessages))
if(unflushedMessages >= config.flushInterval)
flush()
appendInfo
}
} catch {
case e: IOException => throw new KafkaStorageException("I/O exception in append to log '%s'".format(name), e)
}
}
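// Illustrative usage sketch (not part of the original source; the ByteBufferMessageSet/Message
// constructors shown are assumed from the 0.8.x API):
//
//   val info = log.append(new ByteBufferMessageSet(NoCompressionCodec, new Message("value".getBytes)))
//   // info.firstOffset / info.lastOffset hold the offsets assigned to the appended batch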
/**
* Validate the following:
* <ol>
* <li> each message matches its CRC
* <li> each message size is valid
* </ol>
*
* Also compute the following quantities:
* <ol>
* <li> First offset in the message set
* <li> Last offset in the message set
* <li> Number of messages
* <li> Number of valid bytes
* <li> Whether the offsets are monotonically increasing
* <li> Whether any compression codec is used (if many are used, then the last one is given)
* </ol>
*/
private def analyzeAndValidateMessageSet(messages: ByteBufferMessageSet): LogAppendInfo = {
var shallowMessageCount = 0
var validBytesCount = 0
var firstOffset, lastOffset = -1L
var sourceCodec: CompressionCodec = NoCompressionCodec
var monotonic = true
for(messageAndOffset <- messages.shallowIterator) {
// update the first offset if on the first message
if(firstOffset < 0)
firstOffset = messageAndOffset.offset
// check that offsets are monotonically increasing
if(lastOffset >= messageAndOffset.offset)
monotonic = false
// update the last offset seen
lastOffset = messageAndOffset.offset
val m = messageAndOffset.message
// Check if the message sizes are valid.
val messageSize = MessageSet.entrySize(m)
if(messageSize > config.maxMessageSize) {
BrokerTopicStats.getBrokerTopicStats(topicAndPartition.topic).bytesRejectedRate.mark(messages.sizeInBytes)
BrokerTopicStats.getBrokerAllTopicsStats.bytesRejectedRate.mark(messages.sizeInBytes)
throw new MessageSizeTooLargeException("Message size is %d bytes which exceeds the maximum configured message size of %d."
.format(messageSize, config.maxMessageSize))
}
// check the validity of the message by checking CRC
m.ensureValid()
shallowMessageCount += 1
validBytesCount += messageSize
val messageCodec = m.compressionCodec
if(messageCodec != NoCompressionCodec)
sourceCodec = messageCodec
}
// Apply broker-side compression if any
val targetCodec = BrokerCompressionCodec.getTargetCompressionCodec(config.compressionType, sourceCodec)
LogAppendInfo(firstOffset, lastOffset, sourceCodec, targetCodec, shallowMessageCount, validBytesCount, monotonic)
}
/**
* Trim any invalid bytes from the end of this message set (if there are any)
* @param messages The message set to trim
* @param info The general information of the message set
* @return A trimmed message set. This may be the same as what was passed in or it may not.
*/
private def trimInvalidBytes(messages: ByteBufferMessageSet, info: LogAppendInfo): ByteBufferMessageSet = {
val messageSetValidBytes = info.validBytes
if(messageSetValidBytes < 0)
throw new InvalidMessageSizeException("Illegal length of message set " + messageSetValidBytes + " Message set cannot be appended to log. Possible causes are corrupted produce requests")
if(messageSetValidBytes == messages.sizeInBytes) {
messages
} else {
// trim invalid bytes
val validByteBuffer = messages.buffer.duplicate()
validByteBuffer.limit(messageSetValidBytes)
new ByteBufferMessageSet(validByteBuffer)
}
}
/**
* Read messages from the log
*
* @param startOffset The offset to begin reading at
* @param maxLength The maximum number of bytes to read
* @param maxOffset The offset to read up to, exclusive (i.e. the first offset NOT included in the resulting message set).
*
* @throws OffsetOutOfRangeException If startOffset is beyond the log end offset or before the base offset of the first segment.
* @return The fetch data information including fetch starting offset metadata and messages read
*/
def read(startOffset: Long, maxLength: Int, maxOffset: Option[Long] = None): FetchDataInfo = {
trace("Reading %d bytes from offset %d in log %s of length %d bytes".format(maxLength, startOffset, name, size))
// Because we don't use lock for reading, the synchronization is a little bit tricky.
// We create the local variables to avoid race conditions with updates to the log.
val currentNextOffsetMetadata = nextOffsetMetadata
val next = currentNextOffsetMetadata.messageOffset
if(startOffset == next)
return FetchDataInfo(currentNextOffsetMetadata, MessageSet.Empty)
var entry = segments.floorEntry(startOffset)
// attempt to read beyond the log end offset is an error
if(startOffset > next || entry == null)
throw new OffsetOutOfRangeException("Request for offset %d but we only have log segments in the range %d to %d.".format(startOffset, segments.firstKey, next))
// Do the read on the segment with a base offset less than the target offset
// but if that segment doesn't contain any messages with an offset greater than that
// continue to read from successive segments until we get some messages or we reach the end of the log
while(entry != null) {
// If the fetch occurs on the active segment, there might be a race condition where two fetch requests occur after
// the message is appended but before the nextOffsetMetadata is updated. In that case the second fetch may
// cause an OffsetOutOfRangeException. To solve that, we cap the read at the exposed position instead of the log
// end of the active segment.
val maxPosition = {
if (entry == segments.lastEntry) {
val exposedPos = nextOffsetMetadata.relativePositionInSegment.toLong
// Check the segment again in case a new segment has just rolled out.
if (entry != segments.lastEntry)
// New log segment has rolled out, we can read up to the file end.
entry.getValue.size
else
exposedPos
} else {
entry.getValue.size
}
}
val fetchInfo = entry.getValue.read(startOffset, maxOffset, maxLength, maxPosition)
if(fetchInfo == null) {
entry = segments.higherEntry(entry.getKey)
} else {
return fetchInfo
}
}
// okay, we are beyond the end of the last segment with no data fetched although the start offset is in range;
// this can happen when all messages with offsets larger than the start offset have been deleted.
// In this case, we will return the empty set with log end offset metadata
FetchDataInfo(nextOffsetMetadata, MessageSet.Empty)
}
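// Illustrative usage sketch (offset and size values are assumed, not part of the original source):
//
//   val fetch = log.read(startOffset = 100L, maxLength = 64 * 1024)
//   // fetch.messageSet contains messages from offset 100 onwards, capped at 64 KB of data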
/**
* Given a message offset, find its corresponding offset metadata in the log.
* If the message offset is out of range, return unknown offset metadata
*/
def convertToOffsetMetadata(offset: Long): LogOffsetMetadata = {
try {
val fetchDataInfo = read(offset, 1)
fetchDataInfo.fetchOffsetMetadata
} catch {
case e: OffsetOutOfRangeException => LogOffsetMetadata.UnknownOffsetMetadata
}
}
/**
* Delete any log segments matching the given predicate function,
* starting with the oldest segment and moving forward until a segment doesn't match.
* @param predicate A function that takes in a single log segment and returns true iff it is deletable
* @return The number of segments deleted
*/
def deleteOldSegments(predicate: LogSegment => Boolean): Int = {
// find any segments that match the user-supplied predicate UNLESS it is the final segment
// and it is empty (since we would just end up re-creating it)
val lastSegment = activeSegment
val deletable = logSegments.takeWhile(s => predicate(s) && (s.baseOffset != lastSegment.baseOffset || s.size > 0))
val numToDelete = deletable.size
if(numToDelete > 0) {
lock synchronized {
// we must always have at least one segment, so if we are going to delete all the segments, create a new one first
if(segments.size == numToDelete)
roll()
// remove the segments from lookups and schedule their deletion
deletable.foreach(deleteSegment(_))
}
}
numToDelete
}
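// Illustrative usage sketch (retentionMs is an assumed value; relies on LogSegment.lastModified,
// which is not shown in this file):
//
//   val retentionMs = 7 * 24 * 60 * 60 * 1000L
//   log.deleteOldSegments(segment => time.milliseconds - segment.lastModified > retentionMs)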
/**
* The size of the log in bytes
*/
def size: Long = logSegments.map(_.size).sum
/**
* The earliest message offset in the log
*/
def logStartOffset: Long = logSegments.head.baseOffset
/**
* The offset metadata of the next message that will be appended to the log
*/
def logEndOffsetMetadata: LogOffsetMetadata = nextOffsetMetadata
/**
* The offset of the next message that will be appended to the log
*/
def logEndOffset: Long = nextOffsetMetadata.messageOffset
/**
* Roll the log over to a new empty log segment if necessary.
*
* @param messagesSize The message set size in bytes
* The log segment will be rolled if one of the following conditions is met:
* <ol>
* <li> The logSegment is full
* <li> The maxTime has elapsed
* <li> The index is full
* </ol>
* @return The currently active segment after (perhaps) rolling to a new segment
*/
private def maybeRoll(messagesSize: Int): LogSegment = {
val segment = activeSegment
if (segment.size > config.segmentSize - messagesSize ||
segment.size > 0 && time.milliseconds - segment.created > config.segmentMs - segment.rollJitterMs ||
segment.index.isFull) {
debug("Rolling new log segment in %s (log_size = %d/%d, index_size = %d/%d, age_ms = %d/%d)."
.format(name,
segment.size,
config.segmentSize,
segment.index.entries,
segment.index.maxEntries,
time.milliseconds - segment.created,
config.segmentMs - segment.rollJitterMs))
roll()
} else {
segment
}
}
/**
* Roll the log over to a new active segment starting with the current logEndOffset.
* This will trim the index to the exact size of the number of entries it currently contains.
* @return The newly rolled segment
*/
def roll(): LogSegment = {
val start = time.nanoseconds
lock synchronized {
val newOffset = logEndOffset
val logFile = logFilename(dir, newOffset)
val indexFile = indexFilename(dir, newOffset)
for(file <- List(logFile, indexFile); if file.exists) {
warn("Newly rolled segment file " + file.getName + " already exists; deleting it first")
file.delete()
}
segments.lastEntry() match {
case null =>
case entry => {
entry.getValue.index.trimToValidSize()
entry.getValue.log.trim()
}
}
val segment = new LogSegment(dir,
startOffset = newOffset,
indexIntervalBytes = config.indexInterval,
maxIndexSize = config.maxIndexSize,
rollJitterMs = config.randomSegmentJitter,
time = time,
fileAlreadyExists = false,
initFileSize = initFileSize,
preallocate = config.preallocate)
val prev = addSegment(segment)
if(prev != null)
throw new KafkaException("Trying to roll a new log segment for topic partition %s with start offset %d while it already exists.".format(name, newOffset))
// We need to update the segment base offset and append position data of the metadata when the log rolls.
// The next offset should not change.
updateLogEndOffset(nextOffsetMetadata.messageOffset)
// schedule an asynchronous flush of the old segment
scheduler.schedule("flush-log", () => flush(newOffset), delay = 0L)
info("Rolled new log segment for '" + name + "' in %.0f ms.".format((time.nanoseconds - start) / (1000.0*1000.0)))
segment
}
}
/**
* The number of messages appended to the log since the last flush
*/
def unflushedMessages() = this.logEndOffset - this.recoveryPoint
/**
* Flush all log segments
*/
def flush(): Unit = flush(this.logEndOffset)
/**
* Flush log segments for all offsets up to offset-1
* @param offset The offset to flush up to (non-inclusive); the new recovery point
*/
def flush(offset: Long) : Unit = {
if (offset <= this.recoveryPoint)
return
debug("Flushing log '" + name + "' up to offset " + offset + ", last flushed: " + lastFlushTime + " current time: " +
time.milliseconds + " unflushed = " + unflushedMessages)
for(segment <- logSegments(this.recoveryPoint, offset))
segment.flush()
lock synchronized {
if(offset > this.recoveryPoint) {
this.recoveryPoint = offset
lastflushedTime.set(time.milliseconds)
}
}
}
/**
* Completely delete this log directory and all contents from the file system with no delay
*/
private[log] def delete() {
lock synchronized {
removeLogMetrics()
logSegments.foreach(_.delete())
segments.clear()
CoreUtils.rm(dir)
}
}
/**
* Truncate this log so that it ends with the greatest offset < targetOffset.
* @param targetOffset The offset to truncate to, an upper bound on all offsets in the log after truncation is complete.
*/
private[log] def truncateTo(targetOffset: Long) {
info("Truncating log %s to offset %d.".format(name, targetOffset))
if(targetOffset < 0)
throw new IllegalArgumentException("Cannot truncate to a negative offset (%d).".format(targetOffset))
if(targetOffset > logEndOffset) {
info("Truncating %s to %d has no effect as the largest offset in the log is %d.".format(name, targetOffset, logEndOffset-1))
return
}
lock synchronized {
if(segments.firstEntry.getValue.baseOffset > targetOffset) {
truncateFullyAndStartAt(targetOffset)
} else {
val deletable = logSegments.filter(segment => segment.baseOffset > targetOffset)
deletable.foreach(deleteSegment(_))
activeSegment.truncateTo(targetOffset)
updateLogEndOffset(targetOffset)
this.recoveryPoint = math.min(targetOffset, this.recoveryPoint)
}
}
}
/**
* Delete all data in the log and start at the new offset
* @param newOffset The new offset to start the log with
*/
private[log] def truncateFullyAndStartAt(newOffset: Long) {
debug("Truncate and start log '" + name + "' to " + newOffset)
lock synchronized {
val segmentsToDelete = logSegments.toList
segmentsToDelete.foreach(deleteSegment(_))
addSegment(new LogSegment(dir,
newOffset,
indexIntervalBytes = config.indexInterval,
maxIndexSize = config.maxIndexSize,
rollJitterMs = config.randomSegmentJitter,
time = time,
fileAlreadyExists = false,
initFileSize = initFileSize,
preallocate = config.preallocate))
updateLogEndOffset(newOffset)
this.recoveryPoint = math.min(newOffset, this.recoveryPoint)
}
}
/**
* The time this log is last known to have been fully flushed to disk
*/
def lastFlushTime(): Long = lastflushedTime.get
/**
* The active segment that is currently taking appends
*/
def activeSegment = segments.lastEntry.getValue
/**
* All the log segments in this log ordered from oldest to newest
*/
def logSegments: Iterable[LogSegment] = {
import JavaConversions._
segments.values
}
/**
* Get all segments beginning with the segment that includes "from" and ending with the segment
* that contains offset "to-1", or the end of the log if to > logEndOffset.
*/
def logSegments(from: Long, to: Long): Iterable[LogSegment] = {
import JavaConversions._
lock synchronized {
val floor = segments.floorKey(from)
if(floor eq null)
segments.headMap(to).values
else
segments.subMap(floor, true, to, false).values
}
}
override def toString() = "Log(" + dir + ")"
/**
* This method performs an asynchronous log segment delete by doing the following:
* <ol>
* <li>It removes the segment from the segment map so that it will no longer be used for reads.
* <li>It renames the index and log files by appending .deleted to the respective file name
* <li>It schedules an asynchronous delete operation to occur in the future
* </ol>
* This allows reads to happen concurrently without synchronization and without the possibility of physically
* deleting a file while it is being read from.
*
* @param segment The log segment to schedule for deletion
*/
private def deleteSegment(segment: LogSegment) {
info("Scheduling log segment %d for log %s for deletion.".format(segment.baseOffset, name))
lock synchronized {
segments.remove(segment.baseOffset)
asyncDeleteSegment(segment)
}
}
/**
* Perform an asynchronous delete on the given file if it exists (otherwise do nothing)
* @throws KafkaStorageException if the file can't be renamed and still exists
*/
private def asyncDeleteSegment(segment: LogSegment) {
segment.changeFileSuffixes("", Log.DeletedFileSuffix)
def deleteSeg() {
info("Deleting segment %d from log %s.".format(segment.baseOffset, name))
segment.delete()
}
scheduler.schedule("delete-file", deleteSeg, delay = config.fileDeleteDelayMs)
}
/**
* Swap a new segment in place and delete one or more existing segments in a crash-safe manner. The old segments will
* be asynchronously deleted.
*
* The sequence of operations is:
* <ol>
* <li> Cleaner creates new segment with suffix .cleaned and invokes replaceSegments().
* If broker crashes at this point, the clean-and-swap operation is aborted and
* the .cleaned file is deleted on recovery in loadSegments().
* <li> New segment is renamed .swap. If the broker crashes after this point before the whole
* operation is completed, the swap operation is resumed on recovery as described in the next step.
* <li> Old segment files are renamed to .deleted and asynchronous delete is scheduled.
* If the broker crashes, any .deleted files left behind are deleted on recovery in loadSegments().
* replaceSegments() is then invoked to complete the swap with newSegment recreated from
* the .swap file and oldSegments containing segments which were not renamed before the crash.
* <li> Swap segment is renamed to replace the existing segment, completing this operation.
* If the broker crashes, any .deleted files which may be left behind are deleted
* on recovery in loadSegments().
* </ol>
*
* @param newSegment The new log segment to add to the log
* @param oldSegments The old log segments to delete from the log
* @param isRecoveredSwapFile true if the new segment was created from a swap file during recovery after a crash
*/
private[log] def replaceSegments(newSegment: LogSegment, oldSegments: Seq[LogSegment], isRecoveredSwapFile : Boolean = false) {
lock synchronized {
// need to do this in two phases to be crash safe AND do the delete asynchronously
// if we crash in the middle of this we complete the swap in loadSegments()
if (!isRecoveredSwapFile)
newSegment.changeFileSuffixes(Log.CleanedFileSuffix, Log.SwapFileSuffix)
addSegment(newSegment)
// delete the old files
for(seg <- oldSegments) {
// remove the index entry
if(seg.baseOffset != newSegment.baseOffset)
segments.remove(seg.baseOffset)
// delete segment
asyncDeleteSegment(seg)
}
// okay we are safe now, remove the swap suffix
newSegment.changeFileSuffixes(Log.SwapFileSuffix, "")
}
}
/**
* remove deleted log metrics
*/
private[log] def removeLogMetrics(): Unit = {
removeMetric("NumLogSegments", tags)
removeMetric("LogStartOffset", tags)
removeMetric("LogEndOffset", tags)
removeMetric("Size", tags)
}
/**
* Add the given segment to the segments in this log. If this segment replaces an existing segment, delete it.
* @param segment The segment to add
*/
def addSegment(segment: LogSegment) = this.segments.put(segment.baseOffset, segment)
}
/**
* Helper functions for logs
*/
object Log {
/** a log file */
val LogFileSuffix = ".log"
/** an index file */
val IndexFileSuffix = ".index"
/** a file that is scheduled to be deleted */
val DeletedFileSuffix = ".deleted"
/** A temporary file that is being used for log cleaning */
val CleanedFileSuffix = ".cleaned"
/** A temporary file used when swapping files into the log */
val SwapFileSuffix = ".swap"
/** Clean shutdown file that indicates the broker was cleanly shut down in 0.8. This is required to maintain backwards compatibility
* with 0.8 and avoid unnecessary log recovery when upgrading from 0.8 to 0.8.1 */
/** TODO: Get rid of CleanShutdownFile in 0.8.2 */
val CleanShutdownFile = ".kafka_cleanshutdown"
/**
* Make a log segment file name from the given offset. All this does is pad out the offset number with zeros
* so that ls sorts the files numerically.
* @param offset The offset to use in the file name
* @return The filename
*/
def filenamePrefixFromOffset(offset: Long): String = {
val nf = NumberFormat.getInstance()
nf.setMinimumIntegerDigits(20)
nf.setMaximumFractionDigits(0)
nf.setGroupingUsed(false)
nf.format(offset)
}
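// For example (illustrative, not part of the original source):
//   filenamePrefixFromOffset(42L) == "00000000000000000042"  // zero-padded to 20 digits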
/**
* Construct a log file name in the given dir with the given base offset
* @param dir The directory in which the log will reside
* @param offset The base offset of the log file
*/
def logFilename(dir: File, offset: Long) =
new File(dir, filenamePrefixFromOffset(offset) + LogFileSuffix)
/**
* Construct an index file name in the given dir using the given base offset
* @param dir The directory in which the log will reside
* @param offset The base offset of the log file
*/
def indexFilename(dir: File, offset: Long) =
new File(dir, filenamePrefixFromOffset(offset) + IndexFileSuffix)
/**
* Parse the topic and partition out of the directory name of a log
*/
def parseTopicPartitionName(dir: File): TopicAndPartition = {
val name: String = dir.getName
if (name == null || name.isEmpty || !name.contains('-')) {
throwException(dir)
}
val index = name.lastIndexOf('-')
val topic: String = name.substring(0, index)
val partition: String = name.substring(index + 1)
if (topic.length < 1 || partition.length < 1) {
throwException(dir)
}
TopicAndPartition(topic, partition.toInt)
}
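// For example (illustrative, not part of the original source):
//   parseTopicPartitionName(new File("/tmp/kafka-logs/my-topic-0")) == TopicAndPartition("my-topic", 0)
//   // everything after the last '-' in the directory name is taken as the partition number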
def throwException(dir: File) {
throw new KafkaException("Found directory " + dir.getCanonicalPath + ", " +
"'" + dir.getName + "' is not in the form of topic-partition\n" +
"If a directory does not contain Kafka topic data it should not exist in Kafka's log " +
"directory")
}
}
| eljefe6a/kafka | core/src/main/scala/kafka/log/Log.scala | Scala | apache-2.0 | 39,302 |
package org.jetbrains.plugins.scala
package refactoring.rename3
import java.io.File
import java.util
import com.intellij.codeInsight.TargetElementUtil
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.impl.VirtualFilePointerManagerImpl
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager
import com.intellij.openapi.vfs.{LocalFileSystem, VfsUtil, VirtualFile}
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil
import com.intellij.psi.{PsiDocumentManager, PsiFile}
import com.intellij.refactoring.rename.{RenameProcessor, RenamePsiElementProcessor}
import com.intellij.testFramework.{LightPlatformCodeInsightTestCase, LightPlatformTestCase, PlatformTestUtil, PsiTestUtil}
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.extensions.inWriteAction
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.util.TestUtils
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
/**
* Nikolay.Tropin
* 9/6/13
*/
abstract class ScalaRenameTestBase extends ScalaLightPlatformCodeInsightTestCaseAdapter {
val caretMarker = "/*caret*/"
private var myEditors: Map[VirtualFile, Editor] = null
private var myDirectory: VirtualFile = null
protected val folderPath: String = TestUtils.getTestDataPath + "/rename3/"
private def rootBefore = (folderPath + getTestName(true) + "/before").replace(File.separatorChar, '/')
private def rootAfter = (folderPath + getTestName(true) + "/after").replace(File.separatorChar, '/')
override protected def afterSetUpProject() = {
super.afterSetUpProject()
myDirectory = PsiTestUtil.createTestProjectStructure(projectAdapter, moduleAdapter, rootBefore, new util.HashSet[File]())
}
protected def doTest(newName: String = "NameAfterRename") {
LocalFileSystem.getInstance().refresh(false)
val filesBefore =
VfsUtil.collectChildrenRecursively(myDirectory.findChild("tests")).asScala
.filter(!_.isDirectory)
.toArray
val caretPositions = findCaretsAndRemoveMarkers(filesBefore)
PsiDocumentManager.getInstance(projectAdapter).commitAllDocuments()
myEditors = createEditors(filesBefore)
for {
CaretPosition(vFile, offset) <- caretPositions
} {
val file = getPsiManagerAdapter.findFile(vFile)
val editor = myEditors(vFile)
editor.getCaretModel.moveToOffset(offset)
val oldName = doRename(editor, file, newName)
val dirAfter = LocalFileSystem.getInstance.refreshAndFindFileByPath(rootAfter)
PlatformTestUtil.assertDirectoriesEqual(dirAfter, myDirectory)
//rename back for next caret position
doRename(editor, file, oldName)
}
}
private def fileText(file: VirtualFile): String = {
val text = FileDocumentManager.getInstance().getDocument(file).getText
StringUtil.convertLineSeparators(text)
}
case class CaretPosition(file: VirtualFile, offset: Int)
private def findCaretsAndRemoveMarkers(files: Array[VirtualFile]): Seq[CaretPosition] = {
val caretsInFile: VirtualFile => Seq[CaretPosition] = { file =>
var text = fileText(file)
val fileLength = text.length
def findOffsets(s: String): Seq[Int] = {
val result = ListBuffer[Int]()
val length = caretMarker.length
var occ = text.indexOf(caretMarker)
while(occ > 0) {
result += occ
text = text.substring(0, occ) + text.substring(occ + length)
occ = text.indexOf(caretMarker)
}
result
}
val result = findOffsets(text).map(offset => CaretPosition(file, offset))
if (result.nonEmpty) {
inWriteAction(FileDocumentManager.getInstance().getDocument(file).replaceString(0, fileLength, text))
}
result
}
files.flatMap(caretsInFile)
}
private def createEditors(files: Array[VirtualFile]): Map[VirtualFile, Editor] = {
files.map(f => f -> createEditor(f)).toMap
}
private def createEditor(file: VirtualFile) = {
LightPlatformCodeInsightTestCase.createEditor(file)
}
protected override def tearDown() {
super.tearDown()
LightPlatformTestCase.closeAndDeleteProject()
}
private def projectAdapter = getProjectAdapter
private def moduleAdapter = getModuleAdapter
private def doRename(editor: Editor, file: PsiFile, newName: String): String = {
val element = TargetElementUtil.findTargetElement(
InjectedLanguageUtil.getEditorForInjectedLanguageNoCommit(editor, file),
TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED | TargetElementUtil.ELEMENT_NAME_ACCEPTED)
assert(element != null, "Reference is not specified.")
val searchInComments = element.getText != null && element.getText.contains("Comments")
var oldName: String = ""
inWriteAction {
val subst = RenamePsiElementProcessor.forElement(element).substituteElementToRename(element, getEditorAdapter)
if (subst != null) {
oldName = ScalaNamesUtil.scalaName(subst)
new RenameProcessor(projectAdapter, subst, newName, searchInComments, false).run()
}
}
PsiDocumentManager.getInstance(getProjectAdapter).commitAllDocuments()
val document = PsiDocumentManager.getInstance(getProjectAdapter).getDocument(file)
PsiDocumentManager.getInstance(getProjectAdapter).doPostponedOperationsAndUnblockDocument(document)
FileDocumentManager.getInstance.saveAllDocuments()
oldName
}
}
| triplequote/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/refactoring/rename3/ScalaRenameTestBase.scala | Scala | apache-2.0 | 5,631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala.hadoop.mapreduce
import org.apache.flink.annotation.Public
import org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormatBase
import org.apache.hadoop.mapreduce.{Job, OutputFormat}
/**
 * Scala wrapper around Flink's HadoopOutputFormatBase that writes (K, V) tuples to a Hadoop
 * mapreduce OutputFormat.
 */
@Public
class HadoopOutputFormat[K, V](mapredOutputFormat: OutputFormat[K, V], job: Job)
extends HadoopOutputFormatBase[K, V, (K, V)](mapredOutputFormat, job) {
def writeRecord(record: (K, V)) {
this.recordWriter.write(record._1, record._2)
}
}
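// Illustrative usage sketch (assumes Hadoop's TextOutputFormat, an output Path, and a Flink
// DataSet[(Text, IntWritable)] named counts; none of these are part of the original source):
//
//   val job = Job.getInstance()
//   FileOutputFormat.setOutputPath(job, new Path("hdfs:///output"))
//   val format = new HadoopOutputFormat[Text, IntWritable](new TextOutputFormat[Text, IntWritable], job)
//   counts.output(format)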
| fanzhidongyzby/flink | flink-scala/src/main/scala/org/apache/flink/api/scala/hadoop/mapreduce/HadoopOutputFormat.scala | Scala | apache-2.0 | 1,287 |
package com.sksamuel.elastic4s
import org.elasticsearch.search.highlight.HighlightBuilder
import scala.language.implicitConversions
/** @author Stephen Samuel */
trait HighlightDsl {
implicit def string2highlightfield(name: String): HighlightDefinition = new HighlightDefinition(name)
def options = new HighlightOptionsDefinition
}
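// Illustrative usage sketch (the surrounding search DSL and the index/field names are assumed,
// not defined in this file; the string-to-HighlightDefinition implicit above is what makes the
// infix calls on "title" possible):
//
//   search in "library/books" query "scala" highlighting (
//     "title" fragmentSize 20 numberOfFragments 3
//   )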
class HighlightOptionsDefinition {
var _preTags: Seq[String] = Nil
var _postTags: Seq[String] = Nil
var _encoder: Option[HighlightEncoder] = None
var _order: Option[HighlightOrder] = None
var _tagSchema: Option[TagSchema] = None
var _requireFieldMatch: Boolean = false
var _boundary_chars: Option[String] = None
var _boundary_max_scan: Int = 20
def boundaryMaxScan(max: Int): this.type = {
_boundary_max_scan = max
this
}
def boundaryChars(chars: String): this.type = {
_boundary_chars = Option(chars)
this
}
def requireFieldMatch(requireFieldMatch: Boolean): this.type = {
_requireFieldMatch = requireFieldMatch
this
}
def tagSchema(tagSchema: TagSchema): this.type = {
_tagSchema = Option(tagSchema)
this
}
def order(order: HighlightOrder): this.type = {
_order = Option(order)
this
}
def encoder(encoder: HighlightEncoder): this.type = {
this._encoder = Option(encoder)
this
}
def postTags(iterable: Iterable[String]): this.type = postTags(iterable.toSeq: _*)
def postTags(tags: String*): this.type = {
this._postTags = tags
this
}
def preTags(iterable: Iterable[String]): this.type = preTags(iterable.toSeq: _*)
def preTags(tags: String*): this.type = {
this._preTags = tags
this
}
}
abstract class HighlightOrder(val elastic: String)
object HighlightOrder {
case object Score extends HighlightOrder("score")
}
abstract class TagSchema(val elastic: String)
object TagSchema {
case object Styled extends TagSchema("styled")
}
abstract class HighlightEncoder(val elastic: String)
object HighlightEncoder {
case object Default extends HighlightEncoder("default")
case object Html extends HighlightEncoder("html")
}
class HighlightDefinition(field: String) {
val builder = new HighlightBuilder.Field(field)
def boundaryChars(boundaryChars: String): this.type = {
builder.boundaryChars(boundaryChars.toCharArray)
this
}
def boundaryMaxScan(boundaryMaxScan: Int): this.type = {
builder.boundaryMaxScan(boundaryMaxScan)
this
}
def forceSource(forceSource: Boolean): this.type = {
builder.forceSource(forceSource)
this
}
def fragmenter(fragmenter: String): this.type = {
builder.fragmenter(fragmenter)
this
}
def fragmentOffset(n: Int): this.type = {
builder.fragmentOffset(n)
this
}
def fragmentSize(f: Int): this.type = {
builder.fragmentSize(f)
this
}
def highlightFilter(filter: Boolean): this.type = {
builder.highlightFilter(filter)
this
}
def highlighterType(`type`: String): this.type = {
builder.highlighterType(`type`)
this
}
def matchedFields(fields: String*): this.type = matchedFields(fields)
def matchedFields(fields: Iterable[String]): this.type = {
builder.matchedFields(fields.toSeq: _*)
this
}
def noMatchSize(size: Int): this.type = {
builder.noMatchSize(size)
this
}
def numberOfFragments(n: Int): this.type = {
builder.numOfFragments(n)
this
}
def order(order: String): this.type = {
builder.order(order)
this
}
def query(query: QueryDefinition): this.type = {
builder.highlightQuery(query.builder)
this
}
def phraseLimit(limit: Int): this.type = {
builder.phraseLimit(limit)
this
}
def preTag(tags: String*): this.type = {
builder.preTags(tags: _*)
this
}
def postTag(tags: String*): this.type = {
builder.postTags(tags: _*)
this
}
def requireFieldMatchScan(requireFieldMatch: Boolean): this.type = {
builder.requireFieldMatch(requireFieldMatch)
this
}
}
| tototoshi/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/highlighting.scala | Scala | apache-2.0 | 3,978 |
package pirate
import pirate.internal._
sealed trait ParseError
case class ParseErrorShowHelpText(sub: Option[String]) extends ParseError
case class ParseErrorOkMessage(s: String) extends ParseError
case class ParseErrorLeftOver(s: List[String]) extends ParseError
case class ParseErrorMessage(s: String) extends ParseError
case class ParseErrorMissing(s: ParseTree[Info]) extends ParseError
case class ParseErrorInvalidOption(s: String) extends ParseError
case class ParseErrorInvalidArgument(s: String) extends ParseError
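// Illustrative sketch of consuming the ADT (the renderer below is assumed, not part of pirate):
//
//   def render(e: ParseError): String = e match {
//     case ParseErrorShowHelpText(sub)  => "help requested" + sub.fold("")(" for " + _)
//     case ParseErrorOkMessage(s)       => s
//     case ParseErrorLeftOver(args)     => "unexpected arguments: " + args.mkString(" ")
//     case ParseErrorMessage(s)         => s
//     case ParseErrorMissing(_)         => "missing required options"
//     case ParseErrorInvalidOption(s)   => "invalid option: " + s
//     case ParseErrorInvalidArgument(s) => "invalid argument: " + s
//   }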
| markhibberd/pirate | src/main/scala/pirate/ParseError.scala | Scala | bsd-3-clause | 526 |
package org.semagrow.sevod.scraper
import org.apache.spark.rdd.RDD
import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}
import org.semagrow.sevod.scraper.io.TriplesIOOps._
import org.apache.jena.graph._
import scala.reflect.ClassTag
/**
* Created by angel on 25/7/2016.
*/
object Scraper {
import org.semagrow.sevod.model.TriplifierImplicits._
val subjectTrieParameterDefault = "15"
val objectTrieParameterDefault = "150"
val usage = "USAGE:" +
"\n\t scala " + Scraper.getClass + " [input] [endpoint_url] [output]" +
"\n\t scala " + Scraper.getClass + " [input] [endpoint_url] [subjectBound] [objectBound] [output]"
def main(args : Array[String]) {
if (args.length != 5 && args.length != 3) {
throw new IllegalArgumentException(usage)
}
else {
val path = args(0)
val endpoint = args(1)
val flags = "-spov"
val subjectTrieParameter = if (args.length == 3) subjectTrieParameterDefault else args(2)
val objectTrieParameter = if (args.length == 3) objectTrieParameterDefault else args(3)
val output = args(args.length-1)
val datasetId = System.currentTimeMillis.toString
val sparkConfig = new SparkConf()
//.setMaster("local[*]")
.setAppName("SEVOD Stats")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.closure.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.kryo.registrator", "org.semagrow.sevod.scraper.io.TriplesIOOps$JenaKryoRegistrator")
.set("sparqlEndpoint", endpoint)
.set("datasetId", datasetId)
.set("subjectTrieParameter", subjectTrieParameter)
.set("objectTrieParameter", objectTrieParameter)
val sc = new SparkContext(sparkConfig)
val triples = sc
.inputFile(path)
.scrape(flags)
.saveAsNTriplesFile(output)
sc.stop()
}
}
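// Illustrative invocation (jar name, input path and endpoint URL are assumed, not part of the
// original source):
//   spark-submit --class org.semagrow.sevod.scraper.Scraper sevod-scraper.jar \
//     input.nt http://localhost:8890/sparql 15 150 stats.nt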
implicit def rddToScraper(triples : RDD[Triple]): Scraper = new Scraper(triples)
def getVocabulary(node : Node) : Node =
NodeFactory.createURI(node.getURI.substring(0, node.getURI.indexOf('/', "http://".size + 1) + 1))
}
class Scraper (triples : RDD[Triple]) {
import Statistics._
import Utils._
def countTriples[T: ClassTag](trdd: RDD[(T,Triple)]) =
trdd
.mapValues(t => 1).reduceByKey(_+_)
def countDistSubjects[T: ClassTag](trdd: RDD[(T,Triple)]) =
trdd
.mapValues(_.getSubject).distinct()
.mapValues(t => 1).reduceByKey(_+_)
def countDistObjects[T: ClassTag](trdd: RDD[(T,Triple)]) =
trdd.mapValues(_.getObject).distinct()
.mapValues(t => 1).reduceByKey(_+_)
/* scrape Predicates (simple and with vocabularies) */
def scrapePredicates() : RDD[Stats] = {
val predicatePartitioner = new HashPartitioner(triples.context.defaultParallelism)
val triplesByPred = triples
.keyBy(t => t.getPredicate)
.partitionBy(predicatePartitioner).persist()
val count = countTriples(triplesByPred)
val subjectCount = countDistSubjects(triplesByPred)
val objectCount = countDistObjects(triplesByPred)
val datasetId = triples.context.getConf.get("datasetId")
count.join(subjectCount).join(objectCount)
.coalesce(triples.context.defaultMinPartitions)
.map {
case (n,((c,s),o)) => VoidStats(datasetId, n, c, s, o)
}
}
def scrapePredicatesVoc() : RDD[Stats] = {
val predicatePartitioner = new HashPartitioner(triples.context.defaultParallelism)
val triplesByPred = triples
.keyBy(t => t.getPredicate)
.partitionBy(predicatePartitioner).persist()
val count = countTriples(triplesByPred)
val subjectCount = countDistSubjects(triplesByPred)
val objectCount = countDistObjects(triplesByPred)
def vocabulariesOf(elems : RDD[(Node, Node)]) : RDD[(Node, Iterable[Node])] =
elems
.filter(_._2.isURI).filter(_._2.getURI.startsWith("http://"))
.mapValues(Scraper.getVocabulary(_))
.distinct().groupByKey()
val subjVocab = vocabulariesOf(triplesByPred.mapValues(_.getSubject))
val objVocab = vocabulariesOf(triplesByPred.mapValues(_.getObject))
val datasetId = triples.context.getConf.get("datasetId")
count.join(subjectCount).join(objectCount).leftOuterJoin(subjVocab).leftOuterJoin(objVocab)
.coalesce(triples.context.defaultMinPartitions)
.map {
case (n,((((c,s),o),sv),ov)) => PredStats(VoidStats(datasetId, n, c, s, o), sv, ov)
}
}
/* scrape Subjects and Objects */
def scrapeUris(f: Triple => Node, trieParameter: Integer, label: String) : RDD[Stats] = {
val prefixPartitioner = new HashPartitioner(triples.context.defaultParallelism)
val uris = triples.map(f(_)).filter(_.isURI).map(_.getURI)
val prefixes = PathTrie.getPrefixes(uris, trieParameter)
val prefixMap = uris.context.broadcast(prefixes.collect())
// match the node selected by f (subject or object) against the discovered prefixes
val urisByPrefix = triples.filter(t => f(t).isURI)
.flatMap(t => prefixMap.value.filter(p => f(t).getURI.startsWith(p)).map(p => (t, p)))
.keyBy(_._2).mapValues(_._1)
.partitionBy(prefixPartitioner).persist()
val count = countTriples(urisByPrefix)
val subjectCount = countDistSubjects(urisByPrefix)
val objectCount = countDistObjects(urisByPrefix)
val datasetId = triples.context.getConf.get("datasetId")
count.join(subjectCount).join(objectCount)
.coalesce(triples.context.defaultMinPartitions)
.map {
case (n, ((c,s),o)) => PrefixStats(datasetId, label, n, c, s, o)
}
}
def scrapeSubjects() : RDD[Stats] = {
val subjectTrieParameter = Integer.valueOf(triples.context.getConf.get("subjectTrieParameter"))
scrapeUris(_.getSubject, subjectTrieParameter, "subject")
}
def scrapeObjects() : RDD[Stats] = {
val objectTrieParameter = Integer.valueOf(triples.context.getConf.get("objectTrieParameter"))
scrapeUris(_.getObject, objectTrieParameter, "object")
}
/* scrape Classes */
def scrapeClasses() : RDD[Stats] = {
val classPartitioner = new HashPartitioner(triples.context.defaultParallelism)
val triplesByClass = triples
.filter(_.getPredicate.equals(u(rdf, "type")))
.keyBy(t => t.getObject)
.partitionBy(classPartitioner).persist()
val subjectCount = countDistSubjects(triplesByClass)
val datasetId = triples.context.getConf.get("datasetId")
subjectCount
.coalesce(triples.context.defaultMinPartitions)
.map {
case (n,e) => ClssStats(datasetId, n, e)
}
}
/* scrape General Stats */
def scrapeGeneral() : RDD[Stats] = {
val cnt = triples.count()
val prp = triples.map(_.getPredicate).distinct().count()
val ent = triples.filter(_.getPredicate.equals(u(rdf, "type"))).map(_.getSubject).distinct().count()
val cls = triples.filter(_.getPredicate.equals(u(rdf, "type"))).map(_.getObject).distinct().count()
val sjc = triples.map(_.getSubject).distinct().count()
val ojc = triples.map(_.getObject).distinct().count()
val endpoint = triples.context.getConf.get("sparqlEndpoint")
val datasetid = triples.context.getConf.get("datasetId")
triples.context.makeRDD(Seq(GenStats(datasetid, endpoint, cnt, prp, cls, ent, sjc, ojc)))
}
/* scrape main function */
def scrape(flags: String) : RDD[Stats] = {
val emptyrdd = triples.context.emptyRDD[Stats]
val u = scrapeGeneral()
val v = if (flags.contains("p")) {
if (flags.contains("v"))
scrapePredicatesVoc()
else
scrapePredicates()
}
else emptyrdd
val x = if (flags.contains("p")) scrapeClasses() else emptyrdd
val y = if (flags.contains("s")) scrapeSubjects() else emptyrdd
val z = if (flags.contains("o")) scrapeObjects() else emptyrdd
u.union(v).union(x).union(y).union(z)
}
}
| semagrow/sevod-scraper | rdfdump-spark/src/main/scala/org/semagrow/sevod/scraper/Scraper.scala | Scala | apache-2.0 | 7,855 |
package services
import com.github.scribejava.core.oauth.OAuth20Service
import spray.json.RootJsonFormat
import scala.concurrent.{ExecutionContext, Future}
trait ExternalApiService {
def getUserInfo[T](code: String, userInfoUrl: String, service: OAuth20Service)(
implicit formatter: RootJsonFormat[T],
executionContext: ExecutionContext
): Future[T]
}
| sysgears/apollo-universal-starter-kit | modules/authentication/server-scala/src/main/scala/services/ExternalApiService.scala | Scala | mit | 371 |
package de.aaschmid.sbtplugin.testnotifier
import sbt.{Result, TestEvent, TestsListener}
class NotifyingTestsListener(settings: TestNotifySettings) extends TestsListener {
import scala.collection.mutable.ListBuffer
private[testnotifier] val testResults = new ListBuffer[TestResult]()
private var startMillies: Long = 0
override def doInit() {
testResults.clear()
startMillies = System.currentTimeMillis
}
override def startGroup(name: String) = ()
override def testEvent(event: TestEvent) {
event.detail foreach {
testResults += TestResult(_)
}
}
override def endGroup(name: String, t: Throwable) = ()
override def endGroup(name: String, result: Result.Value) = ()
override def doComplete(result: Result.Value) {
import sbt.Process._
val duration = System.currentTimeMillis - startMillies
val worst = Severity.worst(testResults.toList map { _.severity })
val summary = settings.summary(worst)
val body = settings.body(duration, testResults.toList)
if (settings.printNotification) {
println(summary + "\n" + body)
}
settings.notificationCommand :: settings.notificationParams(worst, summary, body) !
}
}
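// Illustrative wiring sketch for an sbt 0.7-style project definition (mySettings is an assumed
// TestNotifySettings value; the plugin's actual entry point may differ):
//
//   class MyProject(info: ProjectInfo) extends DefaultProject(info) {
//     override def testListeners = super.testListeners ++ Seq(
//       new NotifyingTestsListener(mySettings))
//   }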
| aaschmid/sbt-test-notifier | src/main/scala/de/aaschmid/sbtplugin/testnotifier/NotifyingTestsListener.scala | Scala | apache-2.0 | 1,203 |
package app.agent
import app.server.CurrentRunStatusUpdate
import im.mange.jetboot._
import im.mange.jetpac._
case class ChecksProgressAgent() extends Renderable {
private val body = div(id = Some("checksProgress"))
private val panel = div(body).styles(marginBottom("10px"))
def render = panel.render
//TODO: jetboot this up ....
//TODO: introduce ProgressBar into jetboot
//TODO: row this up
def onCurrentRunStatusUpdate(update: CurrentRunStatusUpdate) = body.fill(progressBar(update))
private def progressBar(update: CurrentRunStatusUpdate) =
div(id = Some("progress"), R(
<div class="progress-bar progress-bar-success" style={"width: " + update.successPercent + "%"}>
<span class="sr-only"> {update.successPercent}% Success</span>
</div>
<div class="progress-bar progress-bar-warning" style={"width: " + update.inactivePercent + "%"}>
<span class="sr-only">{update.inactivePercent}% Inactive</span>
</div>
<div class="progress-bar progress-bar-danger" style={"width: " + update.failurePercent + "%"}>
<span class="sr-only">{update.failurePercent}% Failure</span>
</div>
)
).classes("progress", "progress-striped", "active").styles(clear(both), marginBottom("0px"))
}
| alltonp/reprobate | src/main/scala/app/agent/ChecksProgressAgent.scala | Scala | apache-2.0 | 1,294 |
package com.twitter.finagle.loadbalancer.distributor
import com.twitter.finagle.ServiceFactory
/**
* A load balancer and its associated weight. Size refers to the
* size of the balancers backing collection. The [[Distributor]]
* operates over these.
*/
case class WeightClass[Req, Rep](
balancer: ServiceFactory[Req, Rep],
endpoints: BalancerEndpoints[Req, Rep],
weight: Double,
size: Int)
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/distributor/WeightClass.scala | Scala | apache-2.0 | 404 |