diff --git "a/https:/huggingface.co/datasets/iamgroot42/mimir/tree/main/test/github_ngram_13_0.2.jsonl" "b/https:/huggingface.co/datasets/iamgroot42/mimir/tree/main/test/github_ngram_13_0.2.jsonl"
new file mode 100644
--- /dev/null
+++ "b/https:/huggingface.co/datasets/iamgroot42/mimir/tree/main/test/github_ngram_13_0.2.jsonl"
@@ -0,0 +1,740 @@
+"\nTo build a new release, a few steps are required. This will be enhanced in the future.\n\n1. Update FireCamp software release version\n1) Change the version in Makefile from \"latest\" to the release version such as \"1.0\".\n2) Chagne the version in common/types.go from \"latest\" to the release version such as \"1.0\".\n3) Update the \"Release\" to such as \"1.0\" and \"QSS3KeyPrefix\" to \"firecamp/releases/1.0\" in firecamp-master.template and firecamp.template\n\nUpdate the README.md and docs/installation/README.md to the new version as well.\n\nCheck whether need to update AMI ID in firecamp-autoscalegroup.template, https://aws.amazon.com/amazon-linux-ami/\n\n2. Upload the new files\nCreate the new release folder in the \"cloudstax\" bucket, such as firecamp/releases/1.0. Create subfolders: \"templates\", \"scripts\", \"packages\". Upload all templates under packages/aws-cloudformation/ to \"templates\", upload packages/aws-cloudformation/init.sh to \"scripts\", and upload $GOPATH/bin/firecamp-service-cli.tgz and $GOPATH/bin/firecamp-swarminit.tgz to \"packages\". The swarminit command is used by packaging/aws-cloudformation/init.sh to initialize the Docker Swarm cluster.\n\nNote: the default template allows '.' in the QSS3KeyPrefix. AWS QuickStart always points to the latest version, such as aws/vpc/latest for QSAWSVPCS3KeyPrefix, and '.' is not allowed in the key prefix. For the new release, when updating the AWS QuickStart git, remove '.' in the QSS3KeyPrefix and point to the latest version in QuickStart.\nAlso change \"QSS3BucketName\""
+"\ufeffusing System;\n\nnamespace RGiesecke.DllExport\n{\n public class ProblemSolver: IProblemSolver\n {\n /// \n /// Found problem.\n /// \n public Problems.IProblem Found\n {\n get;\n protected set;\n }\n\n /// \n /// Initial exception.\n /// \n public Exception Raw\n {\n get;\n protected set;\n }\n\n /// \n /// Formatted message.\n /// \n public string FMsg\n {\n get\n {\n var p = Found;\n if(p == null || (p.Message == null && p.HowToSolve == null)) {\n return null;\n }\n\n if(p.Message == null) {\n return $\"To solve problem: {p.HowToSolve}\";\n }\n\n var L = Environment.NewLine;\n return $\"{p.Title} ::::::{L}{p.Message}{ (p.HowToSolve != null ? L + p.HowToSolve : \"\") }\";\n }\n }\n\n public ProblemSolver(Exception ex)\n {\n init(ex);\n }\n\n protected void init(Exception ex)\n {\n Raw = ex;\n selector(ex);\n }\n\n protected void selector(Exception ex)\n {\n if(ex.GetType() == typeof(InvalidOperationException)) {\n exinit((InvalidOperationException)ex);\n return;\n }\n }\n\n protected void exinit(InvalidOperationException ex)\n {\n if(ex.Message.Contains(\"0x80070005\")) {\n Found = new Problems.AccessDenied(ex);\n return;\n }\n }\n }\n}"
+"%SJGET_EXAMPLE a demo for SJget.\n% This example script gets the index file of the SJSU Singular matrix collection,\n% and then loads in all symmetric non-binary matrices, in increasing order of\n% number of rows in the matrix.\n%\n% Example:\n% type SJget_example ; % to see an example of how to use SJget\n%\n% See also SJget, SJweb, SJgrep.\n\n% Derived from the UFget toolbox on March 18, 2008.\n% Copyright 2007, Tim Davis, University of Florida.\n\n% modified by L. Foster 09/17/2008\n\ntype SJget_example ;\n\nindex = SJget ;\n% find all symmetric matrices that are not binary,\n% have at least 4000 column and have a gap in the singular\n% values at the numerical rank of at least 1000\nf = find (index.numerical_symmetry == 1 & ~index.isBinary & ...\n index.ncols >= 4000 & index.gap >= 1000 );\n% sort by the dimension of the numerical null space\n[y, j] = sort (index.ncols (f) - index.numrank (f) ) ;\nf = f (j) ;\n\nfor i = f\n fprintf ('Loading %s%s%s, please wait ...\\n', ...\n\tindex.Group {i}, filesep, index.Name {i}) ;\n Problem = SJget (i,index) ;\n %display the problem structure\n disp (Problem) ;\n %"
+"var assert = require('assert');\nvar appStatsParser = require('../../lib/parsers/appStats');\n\nsuite('app stats parser', function() {\n test('single metric', function(){\n var date = new Date();\n var timestamp = date.getTime();\n var postData = {\n host: \"the-host\",\n appId: \"the-app-id\",\n startTime: timestamp,\n appStats: {\n release: \"the-release\",\n packageVersions: {\n foo: null,\n bar: null\n },\n appVersions: [\n {name: 'webapp', version: \"v-webapp\"},\n {name: 'refreshable', version: \"v-refreshable\"},\n {name: 'cordova', version: \"v-cordova\"},\n ]\n }\n };\n var expectedResult = [\n {\n value: {\n host: \"the-host\",\n appId: \"the-app-id\",\n startTime: timestamp,\n release: \"the-release\",\n packageVersions: {\n foo: null,\n bar: null\n },\n appVersions: [\n {name: 'webapp', version: \"v-webapp\"},\n {name: 'refreshable', version: \"v-refreshable\"},\n {name: 'cordova', version: \"v-cordova\"},\n ]\n }\n }\n ];\n var out = appStatsParser(postData);\n out[0].value.startTime = out[0].value.startTime.getTime();\n delete out[0]._id;\n assert.deepEqual(out, expectedResult);\n });\n});"
+"package internal\n\nimport (\n\t\"github.com/johnfercher/maroto/pkg/props\"\n\t\"github.com/jung-kurt/gofpdf\"\n)\n\n// Signature is the abstraction which deals of how to add a signature space inside PDF\ntype Signature interface {\n\tAddSpaceFor(label string, cell Cell, textProp props.Text)\n}\n\ntype signature struct {\n\tpdf gofpdf.Pdf\n\tmath Math\n\ttext Text\n}\n\n// NewSignature create a Signature\nfunc NewSignature(pdf gofpdf.Pdf, math Math, text Text) *signature {\n\treturn &signature{\n\t\tpdf,\n\t\tmath,\n\t\ttext,\n\t}\n}\n\n// AddSpaceFor create a space for a signature inside a cell\nfunc (s *signature) AddSpaceFor(label string, cell Cell, textProp props.Text) {\n\tleft, top, _, _ := s.pdf.GetMargins()\n\tspace := 4.0\n\n\tlineCenterY := cell.Height / 1.33\n\tcell.Y += lineCenterY\n\n\ts.pdf.Line(cell.X+left+space, cell.Y+top, cell.X+cell.Width+left-space, cell.Y+top)\n\n\tcell.Y += 2.0\n\ts.text.Add(label, cell, textProp)\n}"
+"### Skydock - Automagic Service Discovery for [Docker](https://github.com/dotcloud/docker)\n[](https://travis-ci.org/crosbymichael/skydock)\n\n\n## NOTICE\n\nI plan on making some breaking changes soon to help skydns and skydock scale better. To stay up-to-date either\nwatch this repo or follow me on twitter @crosbymichael. \n\n\nSkydock monitors docker events when containers start, stop, die, kill, etc and inserts records into a dynamic\nDNS server [skydns](https://github.com/skynetservices/skydns1). This allows standard DNS queries for services\nrunning inside docker containers. Because lets face it, if you have to modify your application code to work\nwith other service discovery solutions you might as well just give up. DNS just works and it works well. \nAlso you cannot be expected to modify application code that you don't own. Passing service urls via the\ncli or in static config files (nginx) will not be possible if your service discovery solution requires\na client library just to fetch an IP. \n\n\n[Skydns](https://github.com/skynetservices/skydns1) is a very small and simple server that does DNS for \ndiscovery very well. The authors and contributors to skydns helped a lot to make this project possible.\nSkydns exposes a very simple REST API to add, update, and remove services.\n\n\n#### The Details\n\nWhen you start a container with docker an"
+"# Container Hierarchy\n\nA container hierarchy is a tree of containers for the purposes of sharing the registrations of dependency injections. Service types registered to a parent container can be resolved in its child containers too. Use `init(parent: Container)` to instantiate a child container while specifying its parent container:\n\n```swift\nlet parentContainer = Container()\nparentContainer.register(Animal.self) { _ in Cat() }\nlet childContainer = Container(parent: parentContainer)\n\nlet cat = childContainer.resolve(Animal.self)\nprint(cat != nil) // prints \"true\"\n```\n\nIn contrast, service types registered to a child container are _not_ resolved in its parent container:\n\n```swift\nlet parentContainer = Container()\nlet childContainer = Container(parent: parentContainer)\nchildContainer.register(Animal.self) { _ in Cat() }\n\nlet cat = parentContainer.resolve(Animal.self)\nprint(cat == nil) // prints \"true\"\n```\n\n_[Next page: Modularizing Service Registration (Assembly)](Assembler.md)_\n\n_[Table of Contents](README.md)_"
+"using System;\r\nusing System.Drawing;\r\nusing System.Globalization;\r\nusing System.Xml;\r\nnamespace MSR.CVE.BackMaker\r\n{\r\n\tpublic class XMLUtils\r\n\t{\r\n\t\tpublic const string sizeTag = \"Size\";\r\n\t\tprivate const string widthAttr = \"Width\";\r\n\t\tprivate const string heightAttr = \"Height\";\r\n\t\tpublic static Size ReadSize(MashupParseContext context)\r\n\t\t{\r\n\t\t\tXMLTagReader xMLTagReader = context.NewTagReader(\"Size\");\r\n\t\t\tSize result = new Size(context.GetRequiredAttributeInt(\"Width\"), context.GetRequiredAttributeInt(\"Height\"));\r\n\t\t\txMLTagReader.SkipAllSubTags();\r\n\t\t\treturn result;\r\n\t\t}\r\n\t\tpublic static void WriteSize(Size size, XmlTextWriter writer)\r\n\t\t{\r\n\t\t\twriter.WriteStartElement(\"Size\");\r\n\t\t\twriter.WriteAttributeString(\"Width\", size.Width.ToString(CultureInfo.InvariantCulture));\r\n\t\t\twriter.WriteAttributeString(\"Height\", size.Height.ToString(CultureInfo.InvariantCulture));\r\n\t\t\twriter.WriteEndElement();\r\n\t\t}\r\n\t\tpublic static void WriteStringXml(XmlTextWriter writer, string TagName, string value)\r\n\t\t{\r\n\t\t\twriter.WriteStartElement(TagName);\r\n\t\t\twriter.WriteString(value);\r\n\t\t\twriter.WriteEndElement();\r\n\t\t}\r\n\t\tpublic static string ReadStringXml(MashupParseContext context, string TagName)\r\n\t\t{\r\n\t\t\tXMLTagReader xMLTagReader = context.NewTagReader(TagName);\r\n\t\t\txMLTagReader.SkipAllSubTags();\r\n\t\t\treturn xMLTagReader.GetContent();\r\n\t\t}\r\n\t}\r\n}"
+"// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This node built-in must be shimmed for the browser.\nimport EventEmitter from \"events\";\n// This is a node dependency that needs to be replaced with a\n// different implementation in the browser.\nimport print from \"./print\";\nexport { print };\n\nexport { AzureKeyCredential, KeyCredential } from \"@azure/core-auth\";\n\n// this is a utility function from a library that should be external\n// for both node and web\nimport { isNode } from \"@azure/core-http\";\n\n// exporting some value from a dependency\nexport { URLBuilder } from \"@azure/core-http\";\n\nexport function createEventEmitter(): EventEmitter {\n // use event emitter\n const e = new EventEmitter();\n\n // Dynamic Node and browser-specific code\n if (isNode) {\n console.log(\"Node \ud83d\udc4a\");\n } else {\n console.log(\"Browser \u2764\");\n }\n\n print(\"Created event emitter\");\n\n return e;\n}"
+"var data = [{\n name: \"creativity\",\n weight: 31\n}, {\n name: \"creative\",\n weight: 22\n}, {\n name: \"intelligence\",\n weight: 15\n}, {\n name: \"more\",\n weight: 12\n}, {\n name: \"people\",\n weight: 12\n}, {\n name: \"theory\",\n weight: 11\n}, {\n name: \"problem\",\n weight: 11\n}, {\n name: \"thinking\",\n weight: 11\n}, {\n name: \"been\",\n weight: 11\n}, {\n name: \"can\",\n weight: 11\n}, {\n name: \"process\",\n weight: 11\n}, {\n name: \"new\",\n weight: 10\n}, {\n name: \"individual\",\n weight: 10\n}, {\n name: \"model\",\n weight: 10\n}, {\n name: \"ideas\",\n weight: 9\n}, {\n name: \"levels\",\n weight: 9\n}, {\n name: \"processes\",\n weight: 9\n}, {\n name: \"different\",\n weight: 9\n}, {\n name: \"high\",\n weight: 9\n}, {\n name: \"motivation\",\n weight: 9\n}, {\n name: \"research\",\n weight: 9\n}, {\n name: \"work\",\n weight: 8\n}, {\n name: \"cognitive\",\n weight: 8\n}, {\n name: \"team\",\n weight: 8\n}, {\n name: \"divergent\",\n weight: 8\n}, {\n name: \"tests\",\n weight: 8\n}, {\n name: \"study\",\n weight: 8\n}, {\n name: \"measures\",\n weight: 8\n}, {\n name: \"theories\",\n weight: 8\n}, {\n name: \"found\",\n weight: 8\n}, {\n name: \"solving\",\n weight: 7\n}, {\n name: \"knowledge\",\n weight: 7\n}, {\n name: \"iq\",\n weight: 7"
+"require 'java'\nrequire 'purugin/predicate'\n\nclass cb::CraftWorld\n extend Purugin::Predicate\n \n ##\n # Get the block at the given coordinates\n # === Parameters\n # * _x_,_y_,_z_ - Give three coord. location\n # * _location_ - Provide a location object\n # === Examples\n # e.player.world.block_at(20, 20, 20) #=> Block instance\n # e.player.world.block_at(Location.new(20, 20, 20)) #=> ditto\n #\n def block_at(*r)\n getBlockAt *r\n end\n \n ##\n # Gets the chunk that this location is within.\n # === Parameters\n # * _location_ - location within the chunk you are asking for\n # === Examples\n # e.player.world.chunk_at(me) #=> Location coerced from your location\n # e.player.world.chunk_at(some_location) #=> Give an explicit location\n def chunk_at(location)\n location = location.respond_to?(:to_loc) ? location.to_loc : location\n \n getChunkAt(location)\n end\n \n ##\n # Is the provided chunk currently loaded by this world. This also can be called\n # with the\n # === Parameters\n # * _chunk_or_x_ * - is the chunk instance you are inquiring about or it is x coordinate\n # * z * - is optional for the [x,y] version of this method.\n # === Examples\n # e.player.world.chunk_loaded? some_chunk\n # e.player.world.chunk_loaded?(30, 13)\n def chunk_loaded?(chunk_or_x, z=nil)\n z ? isChunkLoaded(chunk_or_x, z) : isChunkLoaded(chunk_or_x)\n end\n\n ##\n # Is the chunk being actively used by players (also must be loaded).\n #"
+"import { isFunction } from 'lodash'\nimport { vuex as Auth } from '../auth'\n\n// start extraction data from vuex modules\nconst vuex = { Auth };\nconst keys = Object.keys(vuex);\n\n// process and extract data (modules and plugins)\n/**\n * this is a full functional approach\n * this code use reduce end immutability with spread operator to generate new object and array\n * refs\n * - https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/Reduce\n * - https://developer.mozilla.org/pt-BR/docs/Web/JavaScript/Reference/Operators/Spread_operator\n * - https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions\n *\n * Immutability is very important concept from functional programming, that's prevents side effects\n * Together with the syntax of arrow function make the code more concise\n *\n * plugins have additional treatment, with `.filter`, because not every module has plugins\n */\nconst modules = keys.reduce((acc, key) => ({ ...acc, [key]: vuex[key].module }), {})\nconst plugins = keys.reduce((acc, key) => [...acc, vuex[key].plugin], []).filter(isFunction)\n/**\n * semi-functional version\n * const modules = keys.reduce((acc, key) => {\n * acc[key] = vuex[key].module\n * return acc // without immutability\n * return { ...acc } // with immutability\n * }, {})\n *\n * const plugins = keys.reduce((acc, key) => {\n * acc.push(vuex[key].plugins)\n * return acc // without immutability\n * return [...acc] // with immutability\n * }).filter(plugin => isFunction(plugin))\n */\n// end"
+"% BTF ordering toolbox:\n%\n% Primary functions:\n%\n% btf - permute a square sparse matrix into upper block triangular form\n% maxtrans - permute the columns of a sparse matrix so it has a zero-free diagonal\n% strongcomp - symmetric permutation to upper block triangular form\n%\n% Other:\n% btf_install - compile and install BTF for use in MATLAB.\n% btf_demo - demo for BTF\n% drawbtf - plot the BTF form of a matrix\n% btf_make - compile BTF for use in MATLAB\n%\n% Example:\n% q = maxtrans (A)\n% [p,q,r] = btf (A)\n% [p,r] = strongcomp (A)\n\n% Copyright 2004-2007, University of Florida"
+"@title Support Resources\n@short Support\n@group intro\n\nResources for reporting bugs, requesting features, and getting support.\n\nOverview\n========\n\nThis document describes available support resources.\n\nThe upstream provides free support for a narrow range of problems (primarily,\nsecurity issues and reproducible bugs) and paid support for virtually anything.\n\nThe upstream does not provide free support for general problems with installing\nor configuring Phabricator. You may be able to get some help with these\nkinds of issues from the community.\n\n\nPaid Support\n============\n\nIf you'd like upstream support, see ((pacts)).\n\nThis is the only way to request features and the only way to get guaranteed\nanswers from experts quickly.\n\n\nReporting Security Vulnerabilities\n==================================\n\nThe upstream accepts, fixes, and awards bounties for reports of material\nsecurity issues with the software.\n\nTo report security issues, see @{article:Reporting Security Vulnerabilities}.\n\n\nReporting Bugs\n==============\n\nThe upstream will accept **reproducible** bug reports in modern, first-party\nproduction code running in reasonable environments. Before submitting a bug\nreport you **must update** to the latest version of Phabricator.\n\nTo report bugs, see @{article:Contributing Bug Reports}.\n\n\n\nContributing\n============\n\nPhabricator is a very difficult project to contribute to. New contributors\nwill face a high barrier to entry.\n\nIf you'd like to contribute"
+"{% load nav_tags %}\n\n{% if is_site_map %}\n
\n {% for child in item.children %}\n {% nav_item child %}\n {% endfor %}\n
\n {% endif %}\n
\n{% endif %}"
+"Intro\n-----\nThis example demonstrates how to simply set up a database for export in a\nread-only manner.\n\nExecute with -h or --help to see the available command-line options.\n\nSetup\n-----\n\nThe country table (country.sql) can be used for sample data in the database\nof your choice.\n\nSet up the connection in code (method SetUpConnection), \nor in a connection.ini file with the following format:\n\n[database]\nType=postgresql\nDatabaseName=YourDatabase\nHost=localhost\nusername=me\npassword=secret\nport=nnnn\n\nYou can use another name as connection.ini, but then you must specify the\nname with the -c or --config option.\n\nOnce started, the server will display connection info and available resources.\n\n\nThings to try\n-------------\nThe following list possible queries, using either wget or curl:\n(obviously, you can do the same in a browser)\n\nGet a list of available resources:\nwget -q -O - \"http://localhost:3000/metadata/\"\ncurl -o - \"http://localhost:3000/metadata/\"\n\nSame, but only the names, in compact format:\nwget -q -O - \"http://localhost:3000/metadata/?fl=name&fmt=csv&metadata=0\"\ncurl -o - \"http://localhost:3000/metadata/?fl=name&fmt=csv&metadata=0\"\n\n\nGet metadata for country table:\n\nwget -q -O - \"http://localhost:3000/metadata/country\"\ncurl -o - \"http://localhost:3000/metadata/country\"\n\nOnly get fieldnames:\nwget -q -O - \"http://localhost:3000/metadata/country?fl=name&fmt=csv\"\ncurl -o - \"http://localhost:3000/metadata/country?fl=name&fmt=csv\"\n\nGet a list of all countries:\n\nwget -q -O - http://localhost:3000/country\ncurl -o - http://localhost:3000/country\n\nGet a"
+"configs:\n buildkitd-config:\n buildkitd.toml: |\n debug=true\n [gc]\n enabled=false\n [worker.oci]\n enabled=true\n gc=false\n gckeepstorage=1073741824\n [grpc]\n address = [ \"tcp://0.0.0.0:8080\" ]\n # debugAddress is address for attaching go profiles and debuggers.\n debugAddress = \"0.0.0.0:6060\"\n\nservices:\n buildkitd:\n version: v0\n serviceMesh: false\n image: \"moby/buildkit:v0.6.1\"\n ports:\n - 8080/http,buildkit,expose=false\n privileged: true\n configs:\n - buildkitd-config:/etc/buildkit\n containers:\n - name: registry\n image: \"registry:2\"\n env:\n - REGISTRY_HTTP_ADDR=0.0.0.0:80\n ports:\n - 80:80/tcp,registry,expose=false\n volume:\n - rio-registry:/var/lib/registry,persistent=${PERSISTENT}\n webhook:\n version: v0\n global_permissions:\n - \"* gitwatcher.cattle.io/gitwatchers\"\n - \"* gitwatcher.cattle.io/gitcommits\"\n - '* configmaps'\n - '* events'\n - get,list pods\n - create,get,list /pods/log\n - secrets\n image: rancher/gitwatcher:v0.4.5\n args:\n - gitwatcher\n - --listen-address\n - :8090\n imagePullPolicy: always\n ports:\n - 8090/http,http-webhook\n\ntemplate:\n envSubst: true\n questions:\n - variable: PERSISTENT\n description: \"Use PV to store registry data\""
+"# frozen_string_literal: true\n\nclass Pry\n class Command\n class Ls < Pry::ClassCommand\n module MethodsHelper\n include Pry::Command::Ls::JRubyHacks\n\n private\n\n # Get all the methods that we'll want to output.\n def all_methods(instance_methods = false)\n methods = if instance_methods || @instance_methods_switch\n Pry::Method.all_from_class(@interrogatee)\n else\n Pry::Method.all_from_obj(@interrogatee)\n end\n\n if Pry::Helpers::Platform.jruby? && !@jruby_switch\n methods = trim_jruby_aliases(methods)\n end\n\n methods.select { |method| @ppp_switch || method.visibility == :public }\n end\n\n def resolution_order\n if @instance_methods_switch\n Pry::Method.instance_resolution_order(@interrogatee)\n else\n Pry::Method.resolution_order(@interrogatee)\n end\n end\n\n def format(methods)\n methods.sort_by(&:name).map do |method|\n if method.name == 'method_missing'\n color(:method_missing, 'method_missing')\n elsif method.visibility == :private\n color(:private_method, method.name)\n elsif method.visibility == :protected\n color(:protected_method, method.name)\n else\n color(:public_method, method.name)\n end\n end\n end\n end\n end\n end\nend"
+"\ufeffusing static PKHeX.Core.LegalityCheckStrings;\n\nnamespace PKHeX.Core\n{\n /// \n /// Verifies the data.\n /// \n public sealed class NHarmoniaVerifier : Verifier\n {\n protected override CheckIdentifier Identifier => CheckIdentifier.Trainer;\n\n public override void Verify(LegalityAnalysis data)\n {\n var pkm = data.pkm;\n var EncounterMatch = data.EncounterMatch;\n\n bool checksRequired = EncounterMatch is EncounterStatic5N;\n if (pkm is PK5 pk5)\n {\n bool has = pk5.NPok\u00e9mon;\n if (checksRequired && !has)\n data.AddLine(GetInvalid(LG5SparkleRequired, CheckIdentifier.Fateful));\n if (!checksRequired && has)\n data.AddLine(GetInvalid(LG5SparkleInvalid, CheckIdentifier.Fateful));\n }\n\n if (!checksRequired)\n return;\n\n if (pkm.IVTotal != 30*6)\n data.AddLine(GetInvalid(LG5IVAll30, CheckIdentifier.IVs));\n if (!VerifyNsPKMOTValid(pkm))\n data.AddLine(GetInvalid(LG5ID_N, CheckIdentifier.Trainer));\n if (pkm.IsShiny)\n data.AddLine(GetInvalid(LG5PIDShinyN, CheckIdentifier.Shiny));\n }\n\n private static bool VerifyNsPKMOTValid(PKM pkm)\n {\n if (pkm.TID != 00002 || pkm.SID != 00000)\n return false;\n var ot = pkm.OT_Name;\n if (ot.Length != 1)\n return false;\n var c = Legal.GetG5OT_NSparkle(pkm.Language);\n return c == ot;\n }\n }\n}"
+"open! Import\n\nlet failwithf = Printf.failwithf\n\nmodule Stable = struct\n module V1 = struct\n module T = struct\n type t =\n | Sun\n | Mon\n | Tue\n | Wed\n | Thu\n | Fri\n | Sat\n [@@deriving bin_io, compare, hash, quickcheck]\n\n let to_string t =\n match t with\n | Sun -> \"SUN\"\n | Mon -> \"MON\"\n | Tue -> \"TUE\"\n | Wed -> \"WED\"\n | Thu -> \"THU\"\n | Fri -> \"FRI\"\n | Sat -> \"SAT\"\n ;;\n\n let to_string_long t =\n match t with\n | Sun -> \"Sunday\"\n | Mon -> \"Monday\"\n | Tue -> \"Tuesday\"\n | Wed -> \"Wednesday\"\n | Thu -> \"Thursday\"\n | Fri -> \"Friday\"\n | Sat -> \"Saturday\"\n ;;\n\n let of_string_internal s =\n match String.uppercase s with\n | \"SUN\" | \"SUNDAY\" -> Sun\n | \"MON\" | \"MONDAY\" -> Mon\n | \"TUE\" | \"TUESDAY\" -> Tue\n | \"WED\" | \"WEDNESDAY\" -> Wed\n | \"THU\" | \"THURSDAY\" -> Thu\n | \"FRI\" | \"FRIDAY\" -> Fri\n | \"SAT\" | \"SATURDAY\" -> Sat\n | _ -> failwithf \"Day_of_week.of_string: %S\" s ()\n ;;\n\n let of_int_exn i =\n match i with\n | 0 -> Sun\n | 1 -> Mon\n | 2 -> Tue\n | 3 -> Wed\n | 4 -> Thu\n | 5"
+"[](https://codeclimate.com/github/codelitt/launchpage-rails)\n[](https://codeclimate.com/github/codelitt/launchpage-rails/coverage)\n[](https://semaphoreci.com/codelitt/launchpage-rails) \n\n\nThis is a quick application to get up and running quickly with your new\nstartup idea so you can focus on your actual product. It is a prelaunch\nMVP landing page aimed at gathering signups and testing market interest.\nIt was originally written as an open source alternative to LaunchRock.\nIt is written with Ruby on Rails. Originally, we needed an application\nthat provided signup for two types of users for a two-sided market. It's\nout of the box, ready to go. Just add styling. Fork and enjoy!\n\n*It may have a bit of our content, but it wouldn't take you too long to\nchange it to fit your need. Just a heads up.*\n\n### Example\n\nHere is an example of the launchpage once it's all styled/designed\n(although, both the project and design are old):\n[Backstagr](http://www.backsta.gr)\n\n### Features\n\n1. Email collection for two types of users\n\n2. Social sharing\n\n3. Auto mailer\n\n4. Ability to export user emails via CSV\n\n5. Post signup survey and questionaire to gather more market research\n from your beta users.\n\n**Coming soon**\n\n6. Waiting list social actions (i.e. move up the list if you share to 3\n friends or"
+"\n \u3000\u4e94 \u4e4b\u540e \u80a1\u5e02 \u706b\u7206 \u8fde\u7eed \u5929\u5927 \u4e00\u4e3e \u7a81\u7834 \u4e94\u5927 \u6295\u8d44\u8005 \u878d\u901a \u57fa\u91d1 \u7ba1\u7406 \u516c\u53f8 \u9996\u5e2d \u5206\u6790\u5e08 \u878d\u901a \u884c\u4e1a \u666f\u6c14 \u57fa\u91d1 \u7ecf\u7406 \u51af\u5b87\u8f89 \u8868\u793a \u706b\u7206 \u57fa\u672c\u9762 \u91cd\u4e8e \u6700\u91cd\u8981 \u9009\u80a1 \n \u6295\u8d44\u8005 \u660e\u767d \u575a\u6301 \n \u3000\u51af \u8868\u793a \u57fa\u91d1 \u7ecf\u7406 \u540c\u6837 \u82e6\u82e6 \u575a\u6301 \u7ecf\u9a8c \u53bb\u5e74 \u878d\u901a \u884c\u4e1a \u7814\u7a76\u5458 \u5b9e\u5730 \u8c03\u7814 \u63a8\u8350 \u5e7f\u8239 \u56fd\u9645 \u5f53\u65f6 \u53d7\u7d2f \u94a2\u6750 \u4ef7\u683c \u8fde\u5e74 \u4e0a\u6da8 \u56fd\u5185 \u9020\u8239\u4e1a \u4e00\u76f4 \u5904\u4e8e \u4e0b\u964d \u901a\u9053 \u5e7f\u8239 \u56fd\u9645 \u4e1a\u7ee9 \u60c5\u51b5 \u7406\u60f3 \u6ca1\u6709 \u4e00\u5bb6 \u57fa\u91d1 \u770b\u597d \u6295\u8d44 \u5e7f\u8239 \u56fd\u9645 \u9999\u6e2f \u5e02\u573a \u4e00\u76f4 \u4f4e\u8ff7 \n \u516c\u53f8 \u53ec\u5f00 \u4e13\u9898 \u8bba\u8bc1\u4f1a \u6fc0\u70c8 \u8ba8\u8bba \u516c\u53f8 \u5185\u90e8 \u5f62\u6210 \u5171\u8bc6 \u9020\u8239\u4e1a \u6b63\u5904\u4e8e \u884c\u4e1a \u666f\u6c14 \u5468\u671f \u62d0\u70b9 \u5e7f\u8239 \u56fd\u9645 \u8ba2\u5355 \u60c5\u51b5 \u672a\u6765 \u4e1a\u7ee9 \u5927\u5e45\u5ea6 \u63d0\u5347 \u878d\u901a \u884c\u4e1a \u666f\u6c14 \u57fa\u91d1 \u57fa\u91d1 \u901a\u4e7e \u51b3\u5b9a \u91cd\u4ed3 \u4ecb\u5165 \n \u5176\u540e \u76f8\u5f53 \u65f6\u95f4 \u878d\u901a \u57fa\u91d1 \u4e00\u5bb6 \u673a\u6784 \u770b\u597d \u5e7f\u8239 \u56fd\u9645 \u4ef7\u683c \u4f9d\u7136 \u4f4e\u8ff7 \u53bb\u5e74 \u60c5\u51b5 \u53d8\u5f97 \u4ef7\u683c \u6700\u4f4e \u4e0b\u8dcc \u4ef7\u683c 60% \u5de6\u53f3 \u5f53\u65f6 \u62e5\u6709 \u4e0a\u5e02\u516c\u53f8 \u80a1\u4ef7 \u666e\u904d \u63a5\u8f68 \u4f4e\u4e8e \u4e0d\u5728\u5c11\u6570 \u57fa\u91d1 \u7ecf\u7406 \u5b87\u8f89 \u5766\u627f \u611f\u5230 \u538b\u529b \u5de8\u5927 \u575a\u6301 \u4e0b\u6765 \u516c\u53f8 \u57fa\u672c\u9762 \u4e86\u89e3 \u6e05\u695a \u76f8\u4fe1 \u6700\u7ec8 \u5e02\u573a \u8ba4\u53ef \n \u3000\u51af \u5f88\u5feb \u7ed3\u675f \u96be\u71ac \u65e5\u5b50 \u9999\u6e2f \u5e02\u573a \u7ec8\u4e8e \u8ba4\u8bc6\u5230 \u5e7f\u8239 \u56fd\u9645 \u6295\u8d44 \u4ef7\u503c \u7387\u5148 \u5e26\u52a8 \u8fc5\u901f \u8d70\u9ad8 \u8fdb\u5165 \u5e7f\u8239 \u56fd\u9645 \u4ef7\u683c \u8d85\u8fc7 \u76ee\u524d \u5e7f\u8239 \u56fd\u9645 \u80a1\u6539 \u505c\u724c \u505c\u724c \u6536\u62a5 \u53bb\u5e74 \u4ef7\u683c \u76f8\u6bd4 \u51fa\u5934 \u878d\u901a \u65d7\u4e0b \u57fa\u91d1 \u5e7f\u8239 \u56fd\u9645 \u83b7\u5229 \u8d85\u8fc7 100% \n \u5e02\u573a \u5df2\u7ecf \u80fd\u591f"
+"control-character : Vez\u00e9rl\u0151karakterek\nbasic-latin : Latin (alap)\nlatin-1-supplement : Latin-1 kieg\u00e9sz\u00edt\u0151 karakterek\nlatin-extended-a : B\u0151v\u00edtett Latin (A)\nlatin-extended-b : B\u0151v\u00edtett Latin (B)\nipa-extensions : IPA kiterjeszt\u00e9sek\nspacing-modifier-letters : Fonetikus jelek\ncombining-diacritical-marks : Kombin\u00e1lt diakritikus jelek\ngreek-coptic : G\u00f6r\u00f6g \u00e9s Kopt\ncyrillic : Cirill\ncyrillic-supplement : Cirill kieg\u00e9sz\u00edt\u0151 karakterek\narmenian : \u00d6rm\u00e9ny\nhebrew : H\u00e9ber\narabic : Arab\nsyrian : Sz\u00edr\narabic-supplement : Arab kieg\u00e9sz\u00edt\u0151 karakterek\nthaana : Thaana\nnko : Nko\nsamaritan : Szamarit\u00e1n\nmandaic : Mandaic\narabic-extended-a : Kiterjesztett Arab (A)\ndevanagari : D\u00e9van\u00e1gari\nbengali : Beng\u00e1li\ngurmukhi : Gurmukhi\ngujarati : Gudzsarati\noriya : Orija\ntamil : Tamil\ntelugu : Telugu\nkannada : Kannada\nmalayalam : Malayalam\nsinhala : Sinhala\nthai : Thai\nlao : Lao\ntibetan : Tibeti\nmyanmar : Burmai\ngeorgian : Gr\u00faz\nhangul-jamo : Hangul Jamo\nethiopic : Eti\u00f3p\nethiopic-supplement : Eti\u00f3p kieg\u00e9sz\u00edt\u0151 karakterek\ncherokee : Cseroki\nunified-canadian-aboriginal-syllabics : Egyszer\u0171s\u00edtett kanadai bennsz\u00fcl\u00f6tt jelek\nogham : Ogham\nrunic : Runic\ntagalog : Tagalog\nhanunoo : Hanun\u00f3o\nbuhid : Buhid\ntagbanwa : Tagbanwa\nkhmer : Khmer\nmongolian : Mongol\nunified-canadian-aboriginal-syllabics-extended : Egyszer\u0171s\u00edtett kanadai bennsz\u00fcl\u00f6tt jelek kieg\u00e9sz\u00edt\u00e9se\nlimbu : Limbu\ntai-le : Tai Le\nnew-tai-lue : \u00daj Tai L\u00fc\nkhmer-symbols : Khmer szimb\u00f3lumok\nbuginese : Bugin\u00e9z\ntai-tham :"
+"//! Processor state stored in the EFLAGS register.\n\nuse bitflags::*;\n\nuse crate::Ring;\n\nbitflags! {\n /// The EFLAGS register.\n pub struct EFlags: u32 {\n /// ID Flag (ID)\n const FLAGS_ID = 1 << 21;\n /// Virtual Interrupt Pending (VIP)\n const FLAGS_VIP = 1 << 20;\n /// Virtual Interrupt Flag (VIF)\n const FLAGS_VIF = 1 << 19;\n /// Alignment Check (AC)\n const FLAGS_AC = 1 << 18;\n /// Virtual-8086 Mode (VM)\n const FLAGS_VM = 1 << 17;\n /// Resume Flag (RF)\n const FLAGS_RF = 1 << 16;\n /// Nested Task (NT)\n const FLAGS_NT = 1 << 14;\n /// I/O Privilege Level (IOPL) 0\n const FLAGS_IOPL0 = 0b00 << 12;\n /// I/O Privilege Level (IOPL) 1\n const FLAGS_IOPL1 = 0b01 << 12;\n /// I/O Privilege Level (IOPL) 2\n const FLAGS_IOPL2 = 0b10 << 12;\n /// I/O Privilege Level (IOPL) 3\n const FLAGS_IOPL3 = 0b11 << 12;\n /// Overflow Flag (OF)\n const FLAGS_OF = 1 << 11;\n /// Direction Flag (DF)\n const FLAGS_DF = 1 << 10;\n /// Interrupt Enable Flag (IF)\n const FLAGS_IF = 1 << 9;\n /// Trap Flag (TF)\n const FLAGS_TF = 1 << 8;\n /// Sign Flag (SF)\n const FLAGS_SF = 1 << 7;\n /// Zero Flag (ZF)\n const FLAGS_ZF"
+"\ufeffusing DotNet;\r\n\r\nusing Nemerle.Collections;\r\n\r\nusing Nitra.AstUtils;\r\nusing Nitra.Declarations;\r\nusing Nitra.Utils;\r\n\r\nusing System.Drawing;\r\n\r\nusing R = Nitra.Ast.RegexExpressions;\r\n\r\nnamespace Nitra.Ast.RegexExpressions\r\n{\r\n abstract ast Expression : BindableAst\r\n {\r\n }\r\n\r\n abstract ast Unary : R.Expression\r\n {\r\n Expression.Scope = Scope;\r\n Expression : R.Expression;\r\n }\r\n\r\n abstract ast Binary : R.Expression\r\n {\r\n Expression1.Scope = Scope;\r\n Expression2.Scope = Scope;\r\n\r\n Expression1 : R.Expression;\r\n Expression2 : R.Expression;\r\n }\r\n\r\n abstract ast List : R.Unary\r\n {\r\n Expressions.Scope = Scope;\r\n\r\n Expressions : R.Expression*;\r\n }\r\n\r\n ast Sequence : R.List { }\r\n ast Choice : R.List { }\r\n ast Subtract : R.Binary { }\r\n ast Optional : R.Unary { }\r\n ast Repeat : R.Unary { }\r\n ast RepeatWithSeparator : R.Unary\r\n {\r\n Separator.Scope = Scope;\r\n Separator : R.Expression;\r\n }\r\n\r\n ast Call : R.Expression\r\n {\r\n RuleReference.Scope = Scope;\r\n\r\n RuleReference : QualifiedReference;\r\n }\r\n\r\n ast Char : R.Expression { Literal : CharLiteral; }\r\n ast String : R.Expression { Literal : StringLiteral; }\r\n ast Range : R.Expression { }\r\n ast InvertedRange : R.Expression { }\r\n}"
+"# - Try to find fts headers and libraries for alpine linux 3.3\n#\n# Usage of this module as follows:\n#\n# find_package(Fts)\n#\n# Variables used by this module, they can change the default behaviour and need\n# to be set before calling find_package:\n#\n# fts_ROOT_DIR Set this variable to the root installation of\n# fts if the module has problems finding the\n# proper installation path.\n#\n# Variables defined by this module:\n#\n# FTS_FOUND System has fts libraries and headers\n# fts_LIBRARY The fts library\n# fts_INCLUDE_DIR The location of fts headers\n\n\n\nfind_path(fts_ROOT_DIR\n NAMES include/fts.h\n)\n\nfind_library(fts_LIBRARY\n NAMES libfts.a fts\n HINTS ${fts_ROOT_DIR}/lib\n)\n\nfind_path(fts_INCLUDE_DIR\n NAMES fts.h\n HINTS ${fts_ROOT_DIR}/include\n)\n\ninclude(FindPackageHandleStandardArgs)\nfind_package_handle_standard_args(fts DEFAULT_MSG\n fts_LIBRARY\n fts_INCLUDE_DIR\n)\n\nmark_as_advanced(\n fts_ROOT_DIR\n fts_LIBRARY\n fts_INCLUDE_DIR\n)"
+"\"\"\"\nCS131 - Computer Vision: Foundations and Applications\nAssignment 4\nAuthor: Donsuk Lee (donlee90@stanford.edu)\nDate created: 09/2017\nLast modified: 10/19/2018\nPython Version: 3.5+\n\"\"\"\n\nimport numpy as np\nfrom skimage import color\n\n\ndef energy_function(image):\n \"\"\"Computes energy of the input image.\n\n For each pixel, we will sum the absolute value of the gradient in each direction.\n Don't forget to convert to grayscale first.\n\n Hint: Use np.gradient here\n\n Args:\n image: numpy array of shape (H, W, 3)\n\n Returns:\n out: numpy array of shape (H, W)\n \"\"\"\n H, W, _ = image.shape\n out = np.zeros((H, W))\n gray_image = color.rgb2gray(image)\n\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out\n\n\ndef compute_cost(image, energy, axis=1):\n \"\"\"Computes optimal cost map (vertical) and paths of the seams.\n\n Starting from the first row, compute the cost of each pixel as the sum of energy along the\n lowest energy path from the top.\n\n We also return the paths, which will contain at each pixel either -1, 0 or 1 depending on\n where to go up if we follow a seam at this pixel.\n\n In the case that energies are equal, choose the left-most path. Note that\n np.argmin returns the index of the first ocurring minimum of the specified"
+"---\ndescription: How to to tune the computation of forces and stresses\nauthors: FJ\n---\n\n\nThis page gives hints on how to to tune the computation of forces and stresses with the ABINIT package.\n\n## Introduction\n\nHellman-Feynman forces are computed from an analytical formula, and\ncorresponds exactly to the limit of finite differences of energy for\ninfinitesimally small atomic displacements when the ground-state calculation\nis at convergence. This feature is available for all the cases where the total\nenergy can be computed. A correction for non-converged cases allows to get\naccurate forces with less converged wavefunctions than without it. The\ndecomposition of the forces in their different components can be provided.\n\nStress can also be computed. This feature is available for all the cases where\nthe total energy can be computed (except wavelets). The decomposition of the\nstresses in their different components can be provided. A smearing scheme\napplied to the kinetic energy [[ecutsm]] allows one to get smooth energy\ncurves as a function of lattice parameters and angles. A target stress can be\ngiven by the user ([[strtarget]]), the geometry optimization algorithm will\ntry to find"
+"import * as vscode from 'vscode';\nimport { VimState } from '../state/vimState';\nimport { TokenType, Token } from './token';\n\ntype LineRefOperation = TokenType.Plus | TokenType.Minus;\n\n/**\n * Represents a range of lines, as expressed on the command line.\n *\n * http://vimdoc.sourceforge.net/htmldoc/cmdline.html#cmdline-ranges\n */\nexport class LineRange {\n left: Token[];\n separator: Token | undefined;\n right: Token[];\n\n constructor() {\n this.left = [];\n this.right = [];\n }\n\n public addToken(tok: Token): void {\n if (tok.type === TokenType.Comma) {\n this.separator = tok;\n return;\n }\n\n if (!this.separator) {\n if (this.left.length > 0) {\n switch (tok.type) {\n case TokenType.Offset:\n case TokenType.Plus:\n case TokenType.Minus:\n break;\n default:\n throw Error('Trailing characters');\n }\n }\n this.left.push(tok);\n } else {\n if (this.right.length > 0) {\n switch (tok.type) {\n case TokenType.Offset:\n case TokenType.Plus:\n case TokenType.Minus:\n break;\n default:\n throw Error('Trailing characters');\n }\n }\n this.right.push(tok);\n }\n }\n\n get isEmpty(): boolean {\n return this.left.length === 0 && this.right.length === 0 && !this.separator;\n }\n\n public toString(): string {\n return this.left.toString() + (this.separator?.content ?? '') + this.right.toString();\n }\n\n /**\n * Resolves the line range to concrete line numbers\n *\n * @param vimState\n * @returns Inclusive line number range [start, end]. Will always be in order.\n */\n public resolve(vimState: VimState): [number, number] {\n if (this.left.length > 0 && this.left[0].type === TokenType.Percent)"
+"# pylint: disable=missing-module-docstring\nimport numpy as np\nimport pandas as pd\n\n\nclass RiskMetrics:\n \"\"\"\n This class contains methods for calculating common risk metrics used in trading and asset management.\n \"\"\"\n\n def __init__(self):\n\n pass\n\n @staticmethod\n def calculate_variance(covariance, weights):\n \"\"\"\n Calculate the variance of a portfolio.\n\n :param covariance: (pd.DataFrame/np.matrix) Covariance matrix of assets\n :param weights: (list) List of asset weights\n :return: (float) Variance of a portfolio\n \"\"\"\n\n\n pass\n\n @staticmethod\n def calculate_value_at_risk(returns, confidence_level=0.05):\n \"\"\"\n Calculate the value at risk (VaR) of a portfolio/asset.\n\n :param returns: (pd.DataFrame/np.array) Historical returns for an asset / portfolio\n :param confidence_level: (float) Confidence level (alpha)\n :return: (float) VaR\n \"\"\"\n\n pass\n\n def calculate_expected_shortfall(self, returns, confidence_level=0.05):\n \"\"\"\n Calculate the expected shortfall (CVaR) of a portfolio/asset.\n\n :param returns: (pd.DataFrame/np.array) Historical returns for an asset / portfolio\n :param confidence_level: (float) Confidence level (alpha)\n :return: (float) Expected shortfall\n \"\"\"\n\n pass\n\n @staticmethod\n def calculate_conditional_drawdown_risk(returns, confidence_level=0.05):\n \"\"\"\n Calculate the conditional drawdown of risk (CDaR) of a portfolio/asset.\n\n :param returns: (pd.DataFrame/np.array) Historical returns for an asset / portfolio\n :param confidence_level: (float) Confidence level (alpha)\n :return: (float) Conditional drawdown risk\n \"\"\"\n\n pass"
+"import ENTRY from \"../constant/entry.js\"\n\nimport Loader from \"../loader.js\"\nimport Package from \"../package.js\"\n\nimport { dirname } from \"../safe/path.js\"\nimport errors from \"../errors.js\"\nimport dualResolveFilename from \"../module/internal/dual-resolve-filename.js\"\nimport esmParseLoad from \"../module/esm/parse-load.js\"\nimport makeRequireFunction from \"../module/internal/make-require-function.js\"\nimport shared from \"../shared.js\"\nimport validateString from \"../util/validate-string.js\"\n\nconst {\n TYPE_CJS\n} = ENTRY\n\nconst {\n ERR_INVALID_ARG_VALUE\n} = errors\n\nfunction hook(parent) {\n function requirer(request) {\n validateString(request, \"request\")\n\n if (request === \"\") {\n throw new ERR_INVALID_ARG_VALUE(\"request\", request, \"must be a non-empty string\")\n }\n\n const filename = dualResolveFilename(request, parent)\n const defaultPkg = Loader.state.package.default\n const dirPath = dirname(filename)\n\n if (Package.get(dirPath) === defaultPkg) {\n // Clone the default package to avoid the parsing phase fallback path\n // of module/internal/compile.\n Package.set(dirPath, defaultPkg.clone())\n }\n\n const entry = esmParseLoad(request, parent)\n const exported = entry.module.exports\n\n if (entry.type !== TYPE_CJS) {\n shared.bridged.set(exported, entry)\n }\n\n return exported\n }\n\n function resolver(request, options) {\n return dualResolveFilename(request, parent, false, options)\n }\n\n const req = makeRequireFunction(parent, requirer, resolver)\n\n req.main = Loader.state.module.mainModule\n\n return req\n}\n\nexport default hook"
+"#!/usr/bin/env ruby\n# vim: set nosta noet ts=4 sw=4:\n#\n# Script to automatically move partitioned tables and their indexes\n# to a separate area on disk.\n#\n# Mahlon E. Smith \n#\n# Example use case:\n#\n# - You've got a heavy insert table, such as syslog data.\n# - This table has a partitioning trigger (or is manually partitioned)\n# by date, to separate incoming stuff from archival/report stuff.\n# - You have a tablespace on cheap or slower disk (maybe even\n# ZFS compressed, or some such!)\n#\n# The only assumption this script makes is that your tables are dated, and\n# the tablespace they're moving into already exists.\n#\n# A full example, using the syslog idea from above, where each child\n# table is date partitioned by a convention of \"syslog_YEAR-WEEKOFYEAR\":\n#\n# syslog # <--- parent\n# syslog_2012_06 # <--- inherited\n# syslog_2012_07 # <--- inherited\n# syslog_2012_08 # <--- inherited\n# ...\n#\n# You'd run this script like so:\n#\n# ./warehouse_partitions.rb -F syslog_%Y_%U\n#\n# Assuming this was week 12 of the year, tables syslog_2012_06 through\n# syslog_2012_11 would start sequentially migrating into the tablespace\n# called 'warehouse'."
+"defmodule Cachex.Actions.Expire do\n @moduledoc false\n # Command module to allow setting entry expiration.\n #\n # This module is a little more involved than it would be as it's used as a\n # binding for other actions (such as removing expirations). As such, we have\n # to handle several edge cases with nil values.\n alias Cachex.Actions\n alias Cachex.Services.Locksmith\n\n # add required imports\n import Cachex.Spec\n\n ##############\n # Public API #\n ##############\n\n @doc \"\"\"\n Sets the expiration time on a given cache entry.\n\n If a negative expiration time is provided, the entry is immediately removed\n from the cache (as it means we have already expired). If a positive expiration\n time is provided, we update the touch time on the entry and update the expiration\n to the one provided.\n\n If the expiration provided is nil, we need to remove the expiration; so we update\n in the exact same way. This is done passively due to the fact that Erlang term order\n determines that `nil > -1 == true`.\n\n This command executes inside a lock aware context to ensure that the key isn't currently\n being used/modified/removed from another process in the application.\n \"\"\"\n def execute(cache() = cache, key, expiration, _options) do\n Locksmith.write(cache, [ key ],"
+"-- | Halogen does not support writing an HTML string to the DOM. This component allows us to do this\n-- | at a particular controlled HTML node.\nmodule Conduit.Component.RawHTML where\n\nimport Prelude\n\nimport Conduit.Foreign.Marked (RawHTML, marked)\nimport Data.Foldable (for_)\nimport Data.Maybe (Maybe(..))\nimport Effect (Effect)\nimport Effect.Aff.Class (class MonadAff)\nimport Halogen as H\nimport Halogen.HTML as HH\nimport Halogen.HTML.Properties as HP\nimport Web.HTML (HTMLElement)\n\n-- | For an explanation of how to properly use the PureScript FFI with JavaScript, please see the\n-- | `src/Foreign/Marked.js` file and the `Conduit.Foreign.Marked` module.\nforeign import unsafeSetInnerHTML :: HTMLElement -> RawHTML -> Effect Unit\n\ntype State =\n { elemRef :: H.RefLabel\n , markdown :: String\n }\n\ntype Input =\n { markdown :: String }\n\ndata Action\n = SetInnerHTML\n | Receive Input\n\ncomponent :: forall q o m. MonadAff m => H.Component HH.HTML q Input o m\ncomponent = H.mkComponent\n { initialState: \\{ markdown } -> { elemRef: H.RefLabel \"markdown\", markdown }\n , render\n , eval: H.mkEval $ H.defaultEval\n { handleAction = handleAction\n , receive = Just <<< Receive\n , initialize = Just SetInnerHTML\n }\n }\n where\n handleAction :: Action -> H.HalogenM State Action () o m Unit\n handleAction = case _ of\n SetInnerHTML"
+"/**\n * WordPress dependencies\n */\nimport { getPathAndQueryString } from '@wordpress/url';\nimport { useSelect } from '@wordpress/data';\nimport {\n\tTooltip,\n\tDropdownMenu,\n\tMenuGroup,\n\tMenuItemsChoice,\n} from '@wordpress/components';\nimport { Icon, home } from '@wordpress/icons';\nimport { __ } from '@wordpress/i18n';\nimport { __experimentalLinkControl as LinkControl } from '@wordpress/block-editor';\n\nexport default function PageSwitcher( {\n\tshowOnFront,\n\tactivePage,\n\tonActivePageChange,\n} ) {\n\tconst { pages = [], categories = [], posts = [] } = useSelect(\n\t\t( select ) => {\n\t\t\tconst { getEntityRecords } = select( 'core' );\n\t\t\tconst pageGroups = {\n\t\t\t\tpages: getEntityRecords( 'postType', 'page' )?.map(\n\t\t\t\t\t( _page ) => {\n\t\t\t\t\t\tconst path = getPathAndQueryString( _page.link );\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tlabel:\n\t\t\t\t\t\t\t\tpath === '/' ? (\n\t\t\t\t\t\t\t\t\t<>\n\t\t\t\t\t\t\t\t\t\t{ _page.title.rendered }\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t
\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t) : (\n\t\t\t\t\t\t\t\t\t_page.title.rendered\n\t\t\t\t\t\t\t\t),\n\t\t\t\t\t\t\ttype: 'page',\n\t\t\t\t\t\t\tslug: _page.slug,\n\t\t\t\t\t\t\tvalue: path,\n\t\t\t\t\t\t\tcontext: {\n\t\t\t\t\t\t\t\tpostType: 'page',\n\t\t\t\t\t\t\t\tpostId: _page.id,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t),\n\t\t\t\tcategories: getEntityRecords( 'taxonomy', 'category' )?.map(\n\t\t\t\t\t( category ) => {\n\t\t\t\t\t\tconst path = getPathAndQueryString( category.link );\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tlabel: category.name,\n\t\t\t\t\t\t\ttype: 'category',\n\t\t\t\t\t\t\tslug: category.slug,\n\t\t\t\t\t\t\tvalue: path,\n\t\t\t\t\t\t\tcontext: {\n\t\t\t\t\t\t\t\tquery: { categoryIds: [ category.id ] },\n\t\t\t\t\t\t\t\tqueryContext: { page: 1 },\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t),\n\t\t\t\tposts: [],\n\t\t\t};"
+"import collections\nimport contextlib\nimport shutil\nimport sys\nimport tempfile\n\nimport numpy\nimport six\n\nimport chainer\n# import classes and functions\nfrom chainer.utils.array import size_of_shape # NOQA\nfrom chainer.utils.array import sum_to # NOQA\nfrom chainer.utils.conv import get_conv_outsize # NOQA\nfrom chainer.utils.conv import get_deconv_outsize # NOQA\nfrom chainer.utils.error import _format_array_props # NOQA\nfrom chainer.utils.experimental import experimental # NOQA\nfrom chainer.utils.meta import enable_final # NOQA\nfrom chainer.utils.meta import final # NOQA\nfrom chainer.utils.nondeterministic import nondeterministic # NOQA\nfrom chainer.utils.sparse import CooMatrix # NOQA\nfrom chainer.utils.sparse import get_order # NOQA\nfrom chainer.utils.sparse import to_coo # NOQA\n\n# The following alias has been moved to chainer/__init__.py in order to break\n# circular imports in Python 2.\n# from chainer.utils.walker_alias import WalkerAlias\n\n\n# TODO(kmaehashi) remove this when `six.moves.collections_abc` is implemented.\n# See: https://github.com/chainer/chainer/issues/5097\ntry:\n collections_abc = collections.abc # type: ignore\nexcept AttributeError: # python <3.3\n collections_abc = collections # type: ignore\n\n\ndef force_array(x, dtype=None):\n # numpy returns a float value (scalar) when a return value of an operator\n # is a 0-dimension array.\n # We need to convert such a value to a 0-dimension array because `Function`\n # object needs to return an `numpy.ndarray`.\n if numpy.isscalar(x):\n if dtype is None:\n return numpy.array(x)\n else:\n return numpy.array(x,"
+"#ifndef INC_NMEA_PARSE_H\n#define INC_NMEA_PARSE_H\n\n#define _XOPEN_SOURCE /* glibc2 needs this */\n#include \n#include \n#include \n#include \"../nmea/nmea.h\"\n\n#define NMEA_TIME_FORMAT\t\"%H%M%S\"\n#define NMEA_TIME_FORMAT_LEN\t6\n\n#define NMEA_DATE_FORMAT\t\"%d%m%y\"\n#define NMEA_DATE_FORMAT_LEN\t6\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/**\n * Parse GPS position longitude or latitude\n *\n * s string containing the position. Ex: \"4712.55\", 47 degrees and\n * 12.55 minutes. Will be modified.\n * pos is a pointer to a nmea_position struct where the result should be stored.\n *\n * Returns 0 on success, otherwise -1.\n */\nint nmea_position_parse(char *s, nmea_position *pos);\n\n/**\n * Parse cardinal direction\n *\n * s is a string containing the letter representing the cardinal direction.\n *\n * Returns the cardinal direction (nmea_cardinal_t). On failure,\n * NMEA_CARDINAL_DIR_UNKNOWN is returned.\n */\nnmea_cardinal_t nmea_cardinal_direction_parse(char *s);\n\n/**\n * Parse time from a string\n *\n * s is a string containing the time in format \"HHMMSS\".\n * time is a pointer to a tm struct where the parser time will be stored.\n *\n * Returns 0 on success, otherwise -1.\n */\nint nmea_time_parse(char *s, struct tm *time);\n\n/**\n * Parse date from a string\n *\n * s is a string containing the time in format \"DDMMYY\".\n * time is a"
+"// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n#ifdef __ANDROID__\n#include \n\n#include \"core/common/logging/capture.h\"\n#include \"core/common/logging/isink.h\"\n#include \"core/platform/android/logging/android_log_sink.h\"\n\nnamespace onnxruntime {\nnamespace logging {\n\nvoid AndroidLogSink::SendImpl(const Timestamp& /* timestamp */, const std::string& logger_id, const Capture& message) {\n std::ostringstream msg;\n\n int severity = ANDROID_LOG_INFO;\n switch (message.Severity()) {\n case Severity::kVERBOSE:\n severity = ANDROID_LOG_VERBOSE;\n break;\n case Severity::kINFO:\n severity = ANDROID_LOG_INFO;\n break;\n case Severity::kWARNING:\n severity = ANDROID_LOG_WARN;\n break;\n case Severity::kERROR:\n severity = ANDROID_LOG_ERROR;\n break;\n case Severity::kFATAL:\n severity = ANDROID_LOG_FATAL;\n break;\n }\n\n msg << \" [\" << message.SeverityPrefix() << \":\" << message.Category() << \":\" << logger_id << \", \"\n << message.Location().ToString() << \"] \" << message.Message() << std::endl;\n\n __android_log_print(severity, message.Category(), \"%s\", msg.str().c_str());\n}\n\n} // namespace logging\n} // namespace onnxruntime\n#endif"
+"\ufeffusing System;\nusing System.Collections.Concurrent;\n\nnamespace Framework\n{\n public class ServiceLocator\n {\n private ServiceLocator()\n {\n\n }\n\n public static ServiceLocator Instance => new ServiceLocator();\n\n private static ConcurrentDictionary s_types = new ConcurrentDictionary();\n public bool Register(Type serviceType, object instance) =>\n s_types.TryAdd(serviceType, instance);\n\n public bool Register(Type serviceType) =>\n s_types.TryAdd(serviceType, null);\n\n public bool Register(Type serviceType, Func factory) =>\n s_types.TryAdd(serviceType, factory);\n\n public T Resolve()\n where T : class, new()\n {\n object val;\n if (s_types.TryGetValue(typeof(T), out val))\n {\n if (val == null)\n {\n Type t = typeof(T).GetGenericTypeDefinition();\n }\n else if (val is Func)\n {\n return ((Func)val)(); // invoke factory\n }\n else\n {\n return val as T;\n }\n }\n return null;\n }\n\n }\n}"
+"@validation\nFeature: parameter validation\n Scenario: identifier is required with failover\n When I execute \"cli53 rrcreate --failover PRIMARY $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: identifier is required with weight\n When I execute \"cli53 rrcreate --weight 10 $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: identifier is required with region\n When I execute \"cli53 rrcreate --region us-west-1 $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: identifier alone is invalid\n When I execute \"cli53 rrcreate -i id $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: failover must be PRIMARY/SECONDARY\n When I execute \"cli53 rrcreate -i id --failover JUNK $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: failover and weight are mutually exclusive\n When I execute \"cli53 rrcreate -i id --failover PRIMARY --weight 10 $domain 'a A 127.0.0.1'\"\n Then the exit code was 1\n\n Scenario: passing --append and --replace at the same time makes no sense\n When I execute \"cli53 rrcreate --append --replace $domain 'a A 127.0.0.2'\"\n Then the exit code was 1\n\n Scenario: create requires one argument\n When I execute \"cli53 create a b\"\n Then the exit code was 1\n\n Scenario: delete requires one argument\n When"
+"package service\n\nimport (\n\t\"fmt\"\n\t\"testing\"\n\n\t\"go-common/app/service/main/vip/model\"\n\n\t. \"github.com/smartystreets/goconvey/convey\"\n)\n\nfunc TestServiceaddOpenBind(t *testing.T) {\n\tConvey(\" TestServiceaddOpenBind \", t, func() {\n\t\terr := s.addOpenBind(c, 1, \"xxxx\", 2, &model.OpenBindInfo{}, &model.OpenBindInfo{})\n\t\tSo(err, ShouldBeNil)\n\t})\n}\n\nfunc TestServiceOpenBindByOutOpenID(t *testing.T) {\n\tConvey(\" TestServiceOpenBindByOutOpenID \", t, func() {\n\t\terr := s.OpenBindByOutOpenID(c, &model.ArgBind{\n\t\t\tAppID: 32,\n\t\t\tOpenID: \"bdca8b71e7a6726885d40a395bf9ccd1\",\n\t\t\tOutOpenID: \"7a6726885d40a395bf9ccd2\",\n\t\t})\n\t\tSo(err, ShouldBeNil)\n\t})\n}\n\nfunc TestServiceOpenBindByMid(t *testing.T) {\n\tConvey(\" TestServiceOpenBindByMid \", t, func() {\n\t\terr := s.OpenBindByMid(c, &model.ArgOpenBindByMid{\n\t\t\tAppID: 32,\n\t\t\tOutOpenID: \"7a6726885d40a395bf9ccd3\",\n\t\t\tMid: 1,\n\t\t})\n\t\tSo(err, ShouldBeNil)\n\t})\n}\n\nfunc TestServiceBindInfoByMid(t *testing.T) {\n\tConvey(\" TestServiceBindInfoByMid \", t, func() {\n\t\tres, err := s.BindInfoByMid(c, &model.ArgBindInfo{\n\t\t\tAppID: 30,\n\t\t\tMid: 1,\n\t\t})\n\t\tSo(err, ShouldBeNil)\n\t\tfmt.Println(\"res\", res.Account, res.Outer)\n\t\tSo(res, ShouldNotBeNil)\n\t})\n}"
+"- name: IRPP - D\u00e9ficits des revenus de capitaux mobiliers - C\u00e9libataire pour un revenu salarial de 20 000 \u20ac\n keywords: rcm\n period: 2009\n absolute_error_margin: 0.5\n input:\n salaire_imposable: 20000\n f2dc: 5000\n f2aa: 1000\n f2al: 1000\n f2am: 1000\n f2an: 1000\n f2aq: 1000\n f2ar: 1000\n output:\n irpp: -1086\n- name: IRPP - D\u00e9ficits des revenus de capitaux mobiliers - C\u00e9libataire pour un revenu salarial de 20 000 \u20ac\n keywords: rcm\n period: 2010\n absolute_error_margin: 0.5\n input:\n salaire_imposable: 20000\n f2dc: 5000\n f2aa: 1000\n f2al: 1000\n f2am: 1000\n f2an: 1000\n f2aq: 1000\n f2ar: 1000\n output:\n irpp: -1181\n- name: IRPP - D\u00e9ficits des revenus de capitaux mobiliers - C\u00e9libataire pour un revenu salarial de 20 000 \u20ac\n keywords: rcm\n period: 2011\n absolute_error_margin: 0.5\n input:\n salaire_imposable: 20000\n f2dc: 5000\n f2aa: 1000\n f2al: 1000\n f2am: 1000\n f2an: 1000\n f2aq: 1000\n f2ar: 1000\n output:\n irpp: -1181\n- name: IRPP - D\u00e9ficits des revenus de capitaux mobiliers - C\u00e9libataire pour un revenu salarial de 20 000 \u20ac\n keywords: rcm\n period: 2012\n absolute_error_margin: 0.5\n input:\n salaire_imposable: 20000\n f2dc: 5000\n f2aa: 1000\n f2al: 1000\n f2am: 1000\n f2an: 1000\n f2aq: 1000\n f2ar: 1000\n output:\n irpp: -1181\n- name: IRPP - D\u00e9ficits des revenus de capitaux mobiliers - C\u00e9libataire pour un revenu salarial"
+"using System;\nusing sd = System.Drawing;\nusing swf = System.Windows.Forms;\nusing Eto.Drawing;\nusing Eto.Forms;\nusing Eto.WinForms.Drawing;\nusing System.Collections.Generic;\nusing System.Linq;\nusing Eto.WinForms.Forms.Menu;\nusing System.Reflection;\nusing System.Diagnostics;\n\nnamespace Eto.WinForms.Forms\n{\n\tpublic interface IWindowsControl: Control.IHandler\n\t{\n\t\tbool InternalVisible { get; }\n\n\t\tswf.DockStyle DockStyle { get; }\n\n\t\tswf.Control ContainerControl { get; }\n\n\t\tSize ParentMinimumSize { get; set; }\n\n\t\tSize GetPreferredSize(Size availableSize, bool useCache = false);\n\n\t\tbool SetMinimumSize(bool updateParent = false, bool useCache = false);\n\n\t\tvoid SetScale(bool xscale, bool yscale);\n\n\t\tbool ShouldCaptureMouse { get; }\n\n\t\tbool XScale { get; }\n\n\t\tbool YScale { get; }\n\n\t\tbool BackgroundColorSet { get; }\n\n\t\tControl.ICallback Callback { get; }\n\n\t\tvoid BeforeAddControl(bool top = true);\n\n\t\tbool ShouldBubbleEvent(swf.Message msg);\n\n\t\tbool UseShellDropManager { get; set; }\n\t}\n\n\tpublic static class WindowsControlExtensions\n\t{\n\t\tpublic static IWindowsControl GetWindowsHandler(this Control control)\n\t\t{\n\t\t\tif (control == null)\n\t\t\t\treturn null;\n\n\t\t\tvar handler = control.Handler as IWindowsControl;\n\t\t\tif (handler != null)\n\t\t\t\treturn handler;\n\n\t\t\tvar controlObject = control.ControlObject as Control;\n\t\t\treturn controlObject != null ? controlObject.GetWindowsHandler() : null;\n\n\t\t}\n\n\t\tpublic static Size GetPreferredSize(this Control control, Size? availableSize = null)\n\t\t{\n\t\t\tvar handler = control.GetWindowsHandler();\n\t\t\treturn handler != null ? handler.GetPreferredSize(availableSize ?? Size.Empty) : Size.Empty;\n\t\t}\n\n\t\tpublic static swf.Control GetContainerControl(this Control control)\n\t\t{\n\t\t\tif (control == null)\n\t\t\t\treturn null;"
+"module Lowkiq\n class Server\n def self.build(options)\n require options[:require]\n Lowkiq.on_server_init.call\n\n splitter = Lowkiq.build_splitter.call\n shard_handlers_by_thread = splitter.call Lowkiq.shard_handlers\n scheduler = Lowkiq.build_scheduler.call\n new shard_handlers_by_thread, scheduler\n end\n\n def initialize(shard_handlers_by_thread, scheduler)\n @shard_handlers_by_thread = shard_handlers_by_thread\n @scheduler = scheduler\n @threads = []\n end\n\n def start\n Lowkiq.server_redis_pool.with do |redis|\n Script.load! redis\n end\n\n @shard_handlers_by_thread.each do |handlers|\n handlers.each(&:restore)\n end\n\n @threads = @shard_handlers_by_thread.map do |handlers|\n job = @scheduler.build_job handlers\n Thread.new do\n job.call until exit_from_thread?\n end\n end\n end\n\n def stop\n @stopped = true\n end\n\n def join\n @threads.each(&:join)\n end\n\n def exit_from_thread?\n stopped? || failed?\n end\n\n def stopped?\n @stopped\n end\n\n def failed?\n @threads.map(&:status).any? do |status|\n status != \"run\" && status != \"sleep\"\n end\n end\n end\nend"
+"const Edge = require('../lib/edge.js')\nconst t = require('tap')\n\n// slight hack to snapshot the getters\n// would be nice if tcompare.format showed these by default,\n// but it's tricky to know when to show non-iterables and\n// when not to. Really, it'd be best if class getters were\n// iterable by default, or had some syntax to allow it, but\n// that's outside my sphere of direct influence, and using\n// Object.defineProperty(this, 'foo', { get ... }) is a pita.\nt.formatSnapshot = obj =>\n obj instanceof Edge ? {\n ...obj,\n spec: obj.spec,\n name: obj.name,\n type: obj.type,\n valid: obj.valid,\n error: obj.error,\n from: obj.from,\n to: obj.to,\n peer: obj.peer,\n dev: obj.dev,\n optional: obj.optional,\n workspace: obj.workspace,\n missing: obj.missing,\n peerLocal: obj.peerLocal,\n invalid: obj.invalid,\n __proto__: { constructor: Edge },\n } : obj\n\nconst reset = node => {\n node.edgesOut = new Map()\n node.edgesIn = new Set()\n}\n\n// mock nodes\nconst top = {\n edgesOut: new Map(),\n edgesIn: new Set(),\n package: { name: 'top', version: '1.2.3' },\n isTop: true,\n parent: null,\n resolve (n) {\n return n === 'a' ? a : n === 'b' ? b : null\n },\n addEdgeOut (edge) {\n this.edgesOut.set(edge.name, edge)\n },\n addEdgeIn (edge) {\n this.edgesIn.add(edge)\n },\n}\n\nconst a = {\n edgesOut:"
+"--- GNUmakefile.orig\t2019-01-02 03:35:46 UTC\n+++ GNUmakefile\n@@ -3,7 +3,7 @@\n \n CPPFLAGS = -std=c++14 -O3 -DNDEBUG -ffast-math -fno-builtin-malloc -Wall -Wextra -Wshadow -Wconversion -Wuninitialized\n #CPPFLAGS = -std=c++14 -g -O0 -ffast-math -fno-builtin-malloc -Wall -Wextra -Wshadow -Wconversion -Wuninitialized\n-CXX = clang++\n+#CXX = clang++\n \n # Prefix for installations (Unix / Mac)\n \n@@ -80,7 +80,7 @@ WIN_INCLUDES = /I. /Iinclude /Iinclude/util /Iinclude/\n # Compile commands for individual targets.\n #\n \n-FREEBSD_COMPILE = $(CXX) -g $(CPPFLAGS) -DNDEBUG -fPIC $(INCLUDES) -D_REENTRANT=1 -shared $(SUNW_SRC) -Bsymbolic -o libhoard.so -lpthread\n+FREEBSD_COMPILE = $(CXX) $(CXXFLAGS) -DNDEBUG -fPIC $(INCLUDES) -D_REENTRANT=1 -shared $(SUNW_SRC) -Bsymbolic -o libhoard.so -pthread\n \n DEBIAN_COMPILE = $(CXX) -g -O3 -fPIC -DNDEBUG -I. -Iinclude -Iinclude/util -Iinclude/hoard -Iinclude/superblocks -IHeap-Layers -D_REENTRANT=1 -shared source/libhoard.cpp source/unixtls.cpp Heap-Layers/wrappers/wrapper.cpp -Bsymbolic -o libhoard.so -lpthread -lstdc++ -ldl"
+".\\\" Copyright (c) 1993 Martin Birgmeier\n.\\\" All rights reserved.\n.\\\"\n.\\\" You may redistribute unmodified or modified versions of this source\n.\\\" code provided that the above copyright notice and this and the\n.\\\" following conditions are retained.\n.\\\"\n.\\\" This software is provided ``as is'', and comes with no warranties\n.\\\" of any kind. I shall in no event be liable for anything that happens\n.\\\" to anyone/anything when using this software.\n.\\\"\n.\\\" @(#)rand48.3 V1.0 MB 8 Oct 1993\n.\\\" $FreeBSD: src/lib/libc/gen/rand48.3,v 1.17 2005/01/20 09:17:02 ru Exp $\n.\\\"\n.Dd October 8, 1993\n.Dt RAND48 3\n.Os\n.Sh NAME\n.Nm drand48 ,\n.Nm erand48 ,\n.Nm jrand48 ,\n.Nm lcong48 ,\n.Nm lrand48 ,\n.Nm mrand48 ,\n.Nm nrand48 ,\n.Nm seed48 ,\n.Nm srand48\n.Nd pseudo random number generators and initialization routines\n.Sh LIBRARY\n.Lb libc\n.Sh SYNOPSIS\n.In stdlib.h\n.Ft double\n.Fo drand48\n.Fa void\n.Fc\n.Ft double\n.Fo erand48\n.Fa \"unsigned short xsubi[3]\"\n.Fc\n.Ft long\n.Fo jrand48\n.Fa \"unsigned short xsubi[3]\"\n.Fc\n.Ft void\n.Fo lcong48\n.Fa \"unsigned short param[7]\"\n.Fc\n.Ft long\n.Fo lrand48\n.Fa void\n.Fc\n.Ft long\n.Fo mrand48\n.Fa void\n.Fc\n.Ft long\n.Fo nrand48\n.Fa \"unsigned short xsubi[3]\""
+"\n\n\n\n\n \n \n\n\n\n \n \n \n\n\n\n"
+"#!/bin/bash -e\n\n# Copyright 2016 tsuru authors. All rights reserved.\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file.\n\nstatus=0\nfor f in `git ls-files | xargs grep -L \"Copyright\" | grep \".go\" | grep -v vendor/`\ndo\n echo $f\n status=1\ndone\n\nif [ $status != 0 ]\nthen\n exit $status\nfi\n\ntofix=\naddallyears=\nwhile [ \"${1-}\" != \"\" ]; do\n case $1 in\n \"-f\" | \"--fix\")\n tofix=true\n ;;\n \"--all\")\n addallyears=true\n ;;\n esac\n shift\ndone\n\noldIFS=$IFS\nIFS=$(echo -en \"\\n\\b\")\n\nfunction join_space { \n IFS=\" \"\n echo \"$*\"\n}\n\nfor f in $(git ls-files | grep -v vendor/ | grep -v check-license.sh | xargs -I{} bash -c '(egrep -Ho \"Copyright [0-9 ]+\" {})')\ndo\n IFS=\":\" read file copyright <<< \"$f\"\n IFS=\" \" read copy year <<< \"$copyright\"\n if [ -z $addallyears ]; then\n expectedYears=`git log --diff-filter=A --follow --format=%ad --date=format:%Y -1 -- $file`\n else\n expectedYears=$(join_space $(git log --follow --format=%ad --date=format:%Y -- $file | sort | uniq))\n fi\n if [[ $year != $expectedYears ]];\n then\n echo \"$file - Copyright $year, created: $expectedYears\"\n if [ -z \"$tofix\" ]; then\n status=1\n else\n sed -E -i \"\" \"s/Copyright [0-9 ]+/Copyright ${expectedYears} /g\" $file\n fi\n fi"
+"``django-google-maps`` is a simple application that provides the basic\nhooks into google maps V3 api for use in Django models from Django\nversion 1.11+.\n\nStarting with ``django-google-maps`` version (0.7.0), Django 1.11+ is\nrequired because Django changed their widget template rendering system.\nVersion 0.8.0 supports Django 2.0+, and as such removes support for\nPython 2.7\n\nI\u2019m using this to allow someone from the admin panels to type a freeform\naddress, have the address geocoded on change and plotted on the map. If\nthe location is not 100% correct, the user can drag the marker to the\ncorrect spot and the geo coordinates will update.\n\nStatus\n~~~~~~\n\n|Build Status|\n\nUSAGE:\n------\n\n- include the ``django_google_maps`` app in your ``settings.py``\n\n- Add your Google Maps API Key in your ``settings.py`` as\n ``GOOGLE_MAPS_API_KEY``\n\n- create a model that has both an address field and geolocation field\n\n .. code:: python\n\n from django.db import models\n from django_google_maps import fields as map_fields\n\n class Rental(models.Model):\n address = map_fields.AddressField(max_length=200)\n geolocation = map_fields.GeoLocationField(max_length=100)\n\n- in the ``admin.py`` include the following as a formfield_override\n\n .. code:: python\n\n from django.contrib import admin\n from django_google_maps import widgets as map_widgets\n from django_google_maps import fields as map_fields\n\n class RentalAdmin(admin.ModelAdmin):\n formfield_overrides = {\n map_fields.AddressField: {'widget': map_widgets.GoogleMapsAddressWidget},"
+"# Changelog\n\nAll notable changes to `laravel-google-calendar` will be documented in this file\n\n## 3.1.0 - 2020-09-08\n\n- add support for Laravel 8\n\n## 3.0.0 - 2020-08-24\n\n- add support for OAuth2 authentication, source property on events (#163)\n\n## 2.6.2 - 2020-07-19\n\n- allow `CarbonImmutable` date (#160)\n\n## 2.6.1 - 2020-04-17\n\n- revert changes of previous release\n\n## 2.6.0 - 2020-04-17\n\n- make factory more flexible\n\n## 2.5.3 - 2020-04-17\n\n- make factory more flexible\n\n## 2.5.2 - 2020-04-14\n\n- add quick save (#147)\n\n## 2.5.1 - 2020-04-01\n\n- allow usage of Carbon immutable (#141)\n\n## 2.5.0 - 2020-03-03\n\n- add support for Laravel 7\n\n## 2.4.0 - 2020-02-20\n\n- allow passing array of credentials (#139)\n\n## 2.3.2 - 2019-12-16\n- Fixed fetching more than 250 results of calendar events (#133)\n\n## 2.3.1 - 2019-12-15\n- Add getter for calendar ID per event (#131)\n\n## 2.3.0 - 2019-09-04\n- Laravel 6 compatibility; dropped support for older versions\n\n## 2.2.2 - 2019-02-27\n- allow carbon v2\n\n## 2.2.1 - 2018-09-27\n- `listEvents` now returns events sorted chronologically\n\n## 2.2.0 - 2018-01-10\n- add ability to add query params\n\n## 2.1.1 - 2017-10-16\n- improve sorting\n\n## 2.1.0 - 2017-10-15\n- add"
+"const format = require('string-format');\n\nexports.trim = value => (typeof value === 'string' ? value.trim() : value);\n\nexports.reverse = value =>\n typeof value === 'string'\n ? value\n .split('')\n .reverse()\n .join('')\n : value;\n\nexports.slice = (value, start, end) =>\n typeof value === 'string' ? value.slice(start, end) : value;\n\nexports.replace = (value, searchValue, replaceValue) =>\n typeof value === 'string' ? value.replace(searchValue, replaceValue || '') : value;\n\nexports.substr = (value, from, length) =>\n typeof value === 'string' ? value.substr(from, length) : value;\n\nexports.int = value => {\n const intValue = parseInt(value);\n return isNaN(intValue) ? value : intValue;\n};\n\nexports.split = (value, char, index) => {\n if (typeof value === 'string') {\n if (char === '%SPECIAL_CHAR%') {\n char = '|';\n }\n const results = value.split(char);\n if (results[index] !== undefined) {\n return results[index];\n }\n }\n return value;\n};\n\nexports.format = (value, formatStr) => format(formatStr, value);\n\nexports.until = (value, str) =>\n typeof value === 'string' && value.indexOf(str) > 0 ? value.substr(0, value.indexOf(str)) : value;\n\nexports.match = (value, str) =>\n typeof value === 'string' && value.match(new RegExp(str)) !== null\n ? value.match(new RegExp(str))[1]\n : value;\n\nexports.decodeURIComponent = value =>\n typeof value === 'string' ? decodeURIComponent(value) : value;"
+"#pragma once\n\n#include \"BaseTheme.hpp\"\n#include \"common/Singleton.hpp\"\n#include \"util/RapidJsonSerializeQString.hpp\"\n\n#include \n#include \n#include \n#include \n\nnamespace chatterino {\n\nclass WindowManager;\n\nclass Theme final : public Singleton, public BaseTheme\n{\npublic:\n Theme();\n\n /// SPLITS\n struct {\n QColor messageSeperator;\n QColor background;\n QColor dropPreview;\n QColor dropPreviewBorder;\n QColor dropTargetRect;\n QColor dropTargetRectBorder;\n QColor resizeHandle;\n QColor resizeHandleBackground;\n\n struct {\n QColor border;\n QColor background;\n QColor text;\n QColor focusedText;\n // int margin;\n } header;\n\n struct {\n QColor border;\n QColor background;\n QColor selection;\n QColor focusedLine;\n QColor text;\n QString styleSheet;\n // int margin;\n } input;\n } splits;\n\n void normalizeColor(QColor &color);\n\nprivate:\n void actuallyUpdate(double hue, double multiplier) override;\n void fillLookupTableValues(double (&array)[360], double from, double to,\n double fromValue, double toValue);\n\n double middleLookupTable_[360] = {};\n double minLookupTable_[360] = {};\n\n pajlada::Signals::NoArgSignal repaintVisibleChatWidgets_;\n\n friend class WindowManager;\n};\n\n} // namespace chatterino"
+" 'Import',\n 'start_import' => 'Start Import',\n 'import_running' => 'Import running...',\n 'import_file' => 'File for Import',\n\n 'import_help' => 'You can import your existing browser bookmarks here. Usually, bookmarks are exported into an .html file by your browser. Select the file here and start the import. Depending on the number of bookmarks this process may take some time.',\n\n 'import_networkerror' => 'Something went wrong while trying to import the bookmarks. Please check your browser console for details or consult the application logs.',\n 'import_error' => 'Something went wrong while trying to import the bookmarks. Please consult the application logs.',\n 'import_empty' => 'Could not import any bookmarks. Either the uploaded file is corrupt or empty.',\n 'import_successfully' => ':imported links imported successfully, :skipped skipped.',\n];"
+"A1\tA2\tN\tSNP\tZ\nA\tG\t10000.000\trs0\t-5.269\nA\tG\t10000.000\trs1\t0.500\nA\tG\t10000.000\trs2\t-1.079\nA\tG\t10000.000\trs3\t-0.574\nA\tG\t10000.000\trs4\t-0.460\nA\tG\t10000.000\trs5\t-0.671\nA\tG\t10000.000\trs6\t-1.189\nA\tG\t10000.000\trs7\t5.956\nA\tG\t10000.000\trs8\t-0.483\nA\tG\t10000.000\trs9\t-2.527\nA\tG\t10000.000\trs10\t3.342\nA\tG\t10000.000\trs11\t2.692\nA\tG\t10000.000\trs12\t0.048\nA\tG\t10000.000\trs13\t-2.656\nA\tG\t10000.000\trs14\t-2.447\nA\tG\t10000.000\trs15\t-3.625\nA\tG\t10000.000\trs16\t-4.099\nA\tG\t10000.000\trs17\t-0.403\nA\tG\t10000.000\trs18\t-4.030\nA\tG\t10000.000\trs19\t3.820\nA\tG\t10000.000\trs20\t7.138\nA\tG\t10000.000\trs21\t-1.128\nA\tG\t10000.000\trs22\t-0.504\nA\tG\t10000.000\trs23\t-1.229\nA\tG\t10000.000\trs24\t1.220\nA\tG\t10000.000\trs25\t-3.719\nA\tG\t10000.000\trs26\t-0.410\nA\tG\t10000.000\trs27\t-3.132\nA\tG\t10000.000\trs28\t-0.045\nA\tG\t10000.000\trs29\t1.881\nA\tG\t10000.000\trs30\t-0.869\nA\tG\t10000.000\trs31\t3.947\nA\tG\t10000.000\trs32\t5.499\nA\tG\t10000.000\trs33\t-0.183\nA\tG\t10000.000\trs34\t-2.974\nA\tG\t10000.000\trs35\t4.575\nA\tG\t10000.000\trs36\t2.838\nA\tG\t10000.000\trs37\t0.169\nA\tG\t10000.000\trs38\t3.906"
+"# coding: utf-8\n\n# https://forum.omz-software.com/topic/3039/share-draw-text-in-a-circle-not-earth-shattering/2\n\nimport ui\n# Pythonista Forum - @Phuket2\n# for @ccc , should be pep8 ok and pyflakes :)\n\n# No break through here. just expanded on the Pythonista help\n# code for ui.ImageContext\n\n\n# draw text in a circle, and return a ui.image\ndef text_in_circle(r,\n text,\n text_color = 'white',\n circle_color = 'teal',\n circle_alpha = 1.0,\n font_name = 'Arial Rounded MT Bold',\n inset_percent = 0):\n\n\t'''\n\ttext_in_circle - * denotes a param\n\t==============\n\t*r-ui.Rect or tuple (0, 0, 0, 0) - the bounding rect for the circle.\n\t\n\t*text-text to draw in the circle\n\t\n\t*text_color-color of the text drawn inside the circle\n\t\n\t*circle_color-color of the circle\n\t\n\t*circle_alpha-alpha setting applied to circle color. Note, did this\n\tfor a reason. easier to use string names for colors!\n\t\n\t*font_name-the font used to render the *text\n\t\n\t*inset_percent-reduces *r by a percentage for l,t,w,h for possible\n\tbetter placement of the text inside the circle. a.k.a margin\n\t\n\tRETURNS - a rendered uiImage\n\t\n\t'''\n\t\n\t# this inner function does not need to be here, was just to keep it\n\t# all together\n\tdef get_max_fontsize(r, text, font_name, inset_rect = ui.Rect()):\n\t\tr1 = ui.Rect(*r).inset(*inset_rect)\n\t\tfor i in xrange(5, 1000):\n\t\t\tw, h = ui.measure_string(text, max_width=0,\n\t\t\tfont=(font_name, i),"
+"// @flow\n\nimport * as React from \"react\";\nimport Grid from \"../Grid\";\nimport type { MouseEvents, PointerEvents, FocusEvents } from \"../../\";\n\ntype Props = {|\n ...MouseEvents,\n ...PointerEvents,\n ...FocusEvents,\n +className?: string,\n +value: string | number,\n +imageURL: string,\n +col?: {|\n +width?: number,\n +sm?: number,\n +md?: number,\n +lg?: number,\n |},\n|};\n\nfunction FormImageCheckItem({\n className,\n col: { width = 6, sm = 4, md = 0, lg = 0 } = {},\n imageURL,\n value,\n onClick,\n onMouseEnter,\n onMouseLeave,\n onPointerEnter,\n onPointerLeave,\n onFocus,\n onBlur,\n}: Props): React.Node {\n return (\n \n \n \n );\n}\n\nFormImageCheckItem.displayName = \"Form.ImageCheckItem\";\n\nexport default FormImageCheckItem;"
+"35\n15\n11\n13\n17\n11\n9\n3\n3\n5\n11\n15\n7\n3\n9\n7\n9\n49\n7\n7\n5\n3\n9\n21\n11\n5\n19\n11\n11\n13\n7\n23\n21\n13\n7\n5\n17\n0\n7\n3\n17\n15\n5\n7\n3\n3\n17\n21\n21\n25\n15\n3\n3\n13\n15\n5\n9\n9\n11\n9\n19\n27\n7\n13\n19\n9\n7\n7\n7\n3\n5\n3\n5\n31\n3\n21\n11\n21\n13\n7\n2\n7\n13\n5\n17\n7\n5\n23\n2\n13\n7\n21\n11\n5\n7\n9\n7\n11\n23\n7\n7\n9\n5\n11\n11\n7\n17\n15\n13\n7\n15\n5\n3\n19\n15\n7\n11\n11\n37\n7\n5\n5\n41\n9\n5\n11\n9\n17\n7\n11\n5\n19\n13\n15\n37\n9\n15\n27\n27\n7\n7\n9\n5\n5\n21\n7\n15\n13\n23\n21\n21\n2\n2\n21\n5\n0\n7\n27\n5\n3\n15\n21\n9\n5\n19\n7\n3\n11\n13\n9\n5\n25\n11\n13\n7\n11\n13\n28\n19\n5\n5\n11\n9\n31\n19\n5\n15\n19\n11\n21\n9\n3\n15\n3\n17\n9\n3\n27\n17\n3"
+"\"\"\"\nA type and singleton value (like None) to represent fields that\nhave not been initialized.\n\"\"\"\n\nfrom __future__ import unicode_literals, absolute_import\n\n\nclass UndefinedType(object):\n\n _instance = None\n\n def __str__(self):\n return 'Undefined'\n\n def __repr__(self):\n return 'Undefined'\n\n def __eq__(self, other):\n return self is other\n\n def __ne__(self, other):\n return self is not other\n\n def __bool__(self):\n return False\n\n __nonzero__ = __bool__\n\n def __lt__(self, other):\n self._cmp_err(other, '<')\n\n def __gt__(self, other):\n self._cmp_err(other, '>')\n\n def __le__(self, other):\n self._cmp_err(other, '<=')\n\n def __ge__(self, other):\n self._cmp_err(other, '>=')\n\n def _cmp_err(self, other, op):\n raise TypeError(\"unorderable types: {0}() {1} {2}()\".format(\n self.__class__.__name__, op, other.__class__.__name__))\n\n def __new__(cls, *args, **kwargs):\n if cls._instance is None:\n cls._instance = object.__new__(cls)\n elif cls is not UndefinedType:\n raise TypeError(\"type 'UndefinedType' is not an acceptable base type\")\n return cls._instance\n\n def __init__(self):\n pass\n\n def __setattr__(self, name, value):\n raise TypeError(\"'UndefinedType' object does not support attribute assignment\")\n\n\nUndefined = UndefinedType()"
+"/**\n * cn - \u8868\u5355\u5757 (\u65e7)\n * -- Form.Block\u5df2\u4e0d\u63a8\u8350\uff0c\u5efa\u8bae\u4f7f\u7528 FieldSet\n * -- Block \u7c7b\u4f3c Form\uff0c\u53ef\u4ee5\u5b58\u53d6\u6570\u636e\uff0c\u53ea\u662f\u6ca1\u6709 Submit \u80fd\u529b\u3002\u4e00\u822c\u7528\u5728 Form \u4e2d\u5904\u7406\u590d\u6742\u6570\u636e\u3002\n * -- Block \u5185\u7ec4\u4ef6\u8bbe\u7f6e\u7684 name \u53ea\u5728\u8fd9\u4e2a Block \u5185\u6709\u6548\uff0c\u53ea\u80fd\u5b58\u53d6 Block \u7684 value \u4e2d\u7684\u6570\u636e\uff0c\u4e0d\u80fd\u5b58\u53d6 Form \u7684\u6570\u636e\u3002\n * en - Block (Out of date)\n * -- Not recommend, use FieldSet instead.\n * -- Block is similar to Form except submit\n * -- The name set in the Block component is valid only in this block. It can only access the data in the value of instead of the Form.\n */\nimport React, { PureComponent } from 'react'\nimport { Form, Input } from 'shineout'\n\nexport default class extends PureComponent {\n constructor(props) {\n super(props)\n\n this.rules = {\n password: [\n { required: true, message: 'Please enter password.' },\n { min: 7, message: 'Password must be at least {min} characters.' },\n { regExp: /[a-z]+/i, message: 'Password at least has one letter.' },\n (value, formdata, callback) => {\n if (/\\d+/.test(value)) callback(true)\n else callback(new Error('Password at least has one numeral.'))\n },\n ],\n }\n\n this.colors = ['red', 'orange', 'yellow', 'green', 'cyan', 'blue', 'violet']\n }\n\n render() {\n return (\n
\n console.log(d)}>\n \n \n \n\n : ObservableObject where StateType: StateMachine {\n \n private let initialState: StateType\n private var subsequentStates: [StateType] = []\n\n public let objectWillChange = PassthroughSubject()\n \n public init(state: StateType) {\n initialState = state\n }\n \n var allStates: [StateType] {\n [[initialState], subsequentStates].flatMap({ $0 })\n }\n \n var stateCount: Int {\n 1 + subsequentStates.count\n }\n \n var currentStateIndex: Int = 0 {\n didSet {\n withAnimation {\n objectWillChange.send(())\n }\n }\n }\n \n /// The current state of the store. This will update as time traveling occurs.\n public var state: StateType {\n allStates[currentStateIndex]\n }\n \n /// Dispatches an event to be applied to the current state.\n public func dispatch(event: StateType.Event) {\n var newState = state\n newState.update(with: event)\n subsequentStates.append(newState)\n currentStateIndex = stateCount - 1\n }\n \n}"
+"prefix}postmeta WHERE meta_key = 'simplefavorites_count'\";\n\t\t$count = $wpdb->get_var( $query );\n\t\tif ( (is_multisite()) && (isset($site_id) && ($site_id !== \"\")) ) restore_current_blog();\n\t\treturn intval($count);\n\t}\n}"
+"[![Maintenance Status][maintenance-image]](#maintenance-status)\n\n\n
react-game-kit
\n\n
\n Make games with React & React Native!\n
\n\n***\n\n\n\n\n\n\n## Install\n\n`npm install react-game-kit --save`\n\n## Get Started\n\n`react-game-kit` provides a set of helper components to make it easier to create games with React and React Native.\n\nYou'll want to begin by importing the components you need:\n\n```js\nimport { Loop, Stage } from 'react-game-kit';\n```\n\n### Loop & Stage\n\nNext, in your render method of your top level component, you'll want to put the `Loop` component at the top level, optionally followed by the `Stage` component:\n\n```js\nrender() {\n return (\n \n \n // Game specific components go here\n \n \n );\n}\n```\n\nThe `Loop` component uses `context` to pass a subscribable game tick down your component tree. The `Stage` component does the same with game scale.\n\n### World\n\nIf you intend on using physics in your game, a good next component would be the `World` component, which creates and provides a physics engine & world:\n\n```js\nrender() {\n return (\n \n \n \n // Game specific components go here\n \n \n \n );\n}\n```\n\n### Physics Bodies\n\nOnce you have a"
+"var keys = require('./keys')\n\n/**\n * \u63a5\u6536\u4e00\u4e2a\u51fd\u6570\u4f5c\u4e3a\u7d2f\u52a0\u5668\uff0c\u6570\u7ec4\u4e2d\u7684\u6bcf\u4e2a\u503c\uff08\u4ece\u5de6\u5230\u53f3\uff09\u5f00\u59cb\u5408\u5e76\uff0c\u6700\u7ec8\u4e3a\u4e00\u4e2a\u503c\u3002\n *\n * @param {Array} array \u6570\u7ec4\n * @param {Function} callback \u65b9\u6cd5\n * @param {Object} initialValue \u521d\u59cb\u503c\n * @return {Number}\n */\nfunction reduce (array, callback, initialValue) {\n if (array) {\n var len, reduceMethod\n var index = 0\n var context = null\n var previous = initialValue\n var isInitialVal = arguments.length > 2\n var keyList = keys(array)\n if (array.length && array.reduce) {\n reduceMethod = function () {\n return callback.apply(context, arguments)\n }\n if (isInitialVal) {\n return array.reduce(reduceMethod, previous)\n }\n return array.reduce(reduceMethod)\n }\n if (isInitialVal) {\n index = 1\n previous = array[keyList[0]]\n }\n for (len = keyList.length; index < len; index++) {\n previous = callback.call(context, previous, array[keyList[index]], index, array)\n }\n return previous\n }\n}\n\nmodule.exports = reduce"
+"// PRUSA Mendel\n// Endstop holder extra adapter rotator\n// Used to rotate endstops for Prusa i2/i3 endstop holders\n// GNU GPL v3\n// Ethan Sherman\n// ethan@blackguest.net\n\ninclude <../configuration.scad>\n\n/**\n * This endstop adapter has 3 holes for endstops with either 10mm or 20mm spacing.\n * It is designed to fit on the original endstop-holder to rotate a mechanical endstop 90 degrees.\n *\n * @id endstop-holder-extra\n * @name Endstop holder extra\n * @category Printed\n */\nmodule endstop_extra(shaft_radius){\n screw_hole_spacing = 20;\n screw_hole_spacing2 = 10;\n\n segments=64;\n\n difference(){\n\t union(){\n // for reference, here is the main endstop arm\n\t\t //translate([-30, 0, 0]) cube([40, 4, 10]);\n\n // endstop arm mount\n translate([-20, -35.99, -5]) cube([10, 35, 5]);\n // main sliding endstop mount slider plate\n translate([-30, -5, -5]) cube([30, 5, 15]);\n // extra overhang support (may not be easy to print in this orientation)\n //#translate([-30, -0, -5]) cube([30, 6, 5]);\n\n // extra curved arm support (optional)\n difference(){\n translate([-30, -15, -5]) cube([30, 11, 5]);\n translate([-10, -10, -10]) rotate([0, 0, 90])\n translate([-5, -10, -1]) rotate([0, 0, 0]) cylinder(h =20, r = 10, $fn = segments);\n translate([-40, -10, -10]) rotate([0, 0, 90])\n translate([-5, -10, -1]) rotate([0, 0, 0]) cylinder(h =20, r = 10, $fn = segments);\n }\n\t }\n\n //"
+"#include \"decode.h\"\n\n#define MAX_OGG_PAGE_LEN 100000\n\n\ntypedef struct {\n FILE *fp;\n uint8_t *page_buf;\n int page_len;\n int consumed;\n int raw_opus;\n uint8_t version;\n uint8_t header_type;\n uint8_t seg_length;\n uint8_t page_segs;\n uint64_t granule_pos;\n uint32_t bs_serial;\n uint32_t page_sn;\n uint32_t checksum;\n uint8_t seg_len_table[255];\n uint8_t current_segment;\n uint8_t packets_in_page;\n}opus_obj_t;\n\ntypedef struct _nalu_item_t {\n\tuint8_t *buf;\n\tint len;\n\tstruct slice_header_t slice;\n\tstruct nalu_t nalu;\n}nalu_item_t;\n\ntypedef struct {\n FILE *fp;\n h264_dec_obj_t *h264_handle;\n nalu_item_t *curr_nalu;\n nalu_item_t *next_nalu;\n}h264_obj_t;\n\n\nint init_video(h264_obj_t *handle, const char *video_file);\nint reset_video(h264_obj_t *handle);\nint get_video_frame(h264_obj_t *handle, uint8_t *buf, uint32_t *length, int *end_of_frame);\nint init_audio(opus_obj_t *handle, const char *audio_file, int raw_opus);\nvoid close_audio(opus_obj_t *handle);\nint reset_audio(opus_obj_t *handle);\nint get_audio_packet(opus_obj_t *handle, uint8_t *buf, uint32_t *length);"
+"---\ntitle: Travis CI for R?\ndate: '2013-04-07'\nslug: travis-ci-for-r\n---\n\nI'm always worried about [CRAN](http://cran.r-project.org): a system maintained by FTP and emails from real humans (basically one of Uwe, Kurt or Prof Ripley). I'm worried for two reasons:\n\n1. The number of R packages is growing _exponentially_;\n2. Time and time again I see frustrations from both parties (CRAN maintainers and package authors);\n\nI have a good solution for 2, which is to keep silent when your submission passes the check system, and say \"Sorry!\" no matter if you agree with the reason or not when it did not pass (which made one maintainer unhappy), but do not argue -- just go back and fix the problem if you know what is the problem; or use dark voodoo to hide (yes, _hide_, not solve) the problem if you are sure you are right. If you read the mailing list frequently, you probably remember that `if (CRAN)` discussion. The solution in my mind was `if (Sys.getenv('USER') == 'ripley')`.\n\nThe key is, do not argue. Silence is gold.\n\n\n\nThe CRAN maintainers have been volunteering their time, and we should respect them. The question is, will this approach"
+"PEP: 638\nTitle: Syntactic Macros\nAuthor: Mark Shannon \nStatus: Draft\nType: Standards Track\nContent-Type: text/x-rst\nCreated: 24-Sep-2020\n\nAbstract\n========\n\nThis PEP adds support for syntactic macros to Python.\nA macro is a compile-time function that transforms\na part of the program to allow functionality that cannot be\nexpressed cleanly in normal library code.\n\nThe term \"syntactic\" means that this sort of macro operates on the program's\nsyntax tree. This reduces the chance of mistranslation that can happen\nwith text-based substitution macros, and allows the implementation\nof `hygienic macros`__.\n\n__ https://en.wikipedia.org/wiki/Hygienic_macro\n\nSyntactic macros allow libraries to modify the abstract syntax tree during compilation,\nproviding the ability to extend the language for specific domains without\nadding to complexity to the language as a whole.\n\nMotivation\n==========\n\nNew language features can be controversial, disruptive and sometimes divisive.\nPython is now sufficiently powerful and complex, that many proposed additions \nare a net loss for the language due to the additional complexity.\n\nAlthough a language change may make certain patterns easy to express,\nit will have a cost. Each new feature makes the language larger,\nharder to learn and harder to understand.\nPython was once described as `Python Fits Your Brain`__,\nbut that becomes"
+"module.exports = [\n\t[\n\t\t/Should not import the named export '2' \\(imported as 'c'\\) from default-exporting module \\(only default export is available soon\\)/\n\t],\n\t[\n\t\t/Should not import the named export 'aa' \\(imported as 'aa'\\) from default-exporting module \\(only default export is available soon\\)/\n\t],\n\t[\n\t\t/Should not import the named export 'bb' \\(imported as 'bb'\\) from default-exporting module \\(only default export is available soon\\)/\n\t],\n\t[\n\t\t/Should not import the named export 'named' \\(imported as 'named'\\) from default-exporting module \\(only default export is available soon\\)/\n\t],\n\t[\n\t\t/Should not import the named export 'named' \\(imported as 'gnamed'\\) from default-exporting module \\(only default export is available soon\\)/\n\t]\n];"
+"# -*- coding: utf-8 -*-\n\"\"\"A X509Adapter for use with the requests library.\n\nThis file contains an implementation of the X509Adapter that will\nallow users to authenticate a request using an arbitrary\nX.509 certificate without needing to convert it to a .pem file\n\n\"\"\"\n\nfrom OpenSSL.crypto import PKey, X509\nfrom cryptography import x509\nfrom cryptography.hazmat.primitives.serialization import (load_pem_private_key,\n load_der_private_key)\nfrom cryptography.hazmat.primitives.serialization import Encoding\nfrom cryptography.hazmat.backends import default_backend\n\nfrom datetime import datetime\nfrom requests.adapters import HTTPAdapter\nimport requests\n\nfrom .._compat import PyOpenSSLContext\nfrom .. import exceptions as exc\n\n\"\"\"\nimporting the protocol constants from _ssl instead of ssl because only the\nconstants are needed and to handle issues caused by importing from ssl on\nthe 2.7.x line.\n\"\"\"\ntry:\n from _ssl import PROTOCOL_TLS as PROTOCOL\nexcept ImportError:\n from _ssl import PROTOCOL_SSLv23 as PROTOCOL\n\n\nclass X509Adapter(HTTPAdapter):\n r\"\"\"Adapter for use with X.509 certificates.\n\n Provides an interface for Requests sessions to contact HTTPS urls and\n authenticate with an X.509 cert by implementing the Transport Adapter\n interface. This class will need to be manually instantiated and mounted\n to the session\n\n :param pool_connections: The number of urllib3 connection pools to\n cache.\n :param pool_maxsize: The maximum number of connections to save in the\n pool.\n :param max_retries: The maximum"
+"/* eslint no-restricted-syntax: \"off\" */\n\nexport const isMessage = event => Boolean(event.type === 'message' && event.text);\n\nexport const isMessageToChannel = message => typeof message.channel === 'string' && message.channel[0] === 'C';\n\nexport const isFromUser = (event, userId) => event.user === userId;\n\nexport const messageContainsText = (message, possibleTexts) => {\n const messageText = message.text.toLowerCase();\n const texts = Array.isArray(possibleTexts) ? possibleTexts : [possibleTexts];\n for (const text of texts) {\n if (messageText.indexOf(text.toLowerCase()) > -1) {\n return true;\n }\n }\n\n return false;\n};\n\nexport const filterJokesByCategories = (jokes, categories) => jokes.filter((joke) => {\n if (joke.categories.length === 0) {\n return true;\n }\n\n for (const category of categories) {\n if (joke.categories.includes(category)) {\n return true;\n }\n }\n\n return false;\n});\n\nexport const pickRandom = arr => arr[Math.floor(Math.random() * arr.length)];"
+"require 'digest/sha1'\nrequire 'sequel'\nrequire 'sequel/extensions/migration'\n\nSequel::Model.db = \n if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'\n require 'jdbc/sqlite3' \n Sequel.connect(\"jdbc:sqlite::memory:\")\n else\n require 'sqlite3'\n Sequel.sqlite(\":memory:\")\n end\n\nmigration = Sequel.migration do\n up do\n create_table :accounts do\n primary_key :id\n String :name\n String :surname\n String :email\n String :crypted_password\n String :role\n end\n\n create_table :sections do\n primary_key :id\n foreign_key :account_id\n String :name\n end\n\n create_table :friends do\n primary_key :id\n String :name\n String :age\n String :email\n end\n\n create_table :pages do\n primary_key :id\n String :name\n String :body\n end\n end\n\n down do\n drop_table :accounts\n end\nend\n\nmigration.apply(Sequel::Model.db, :up)\n\nclass Friend < Sequel::Model\nend\n\nclass Page < Sequel::Model\nend\n\n# Fake Section Model\nclass Section < Sequel::Model\n many_to_one :account\nend\n\n# Fake Account Model\nclass Account < Sequel::Model\n attr_accessor :password, :password_confirmation\n\n one_to_many :sections\n\n def self.admin; first(:role => \"admin\"); end\n def self.editor; first(:role => \"editor\"); end\n\n ##\n # Replace ActiveRecord method.\n #\n def self.find_by_id(id)\n self[id] rescue nil\n end\nend\n\n# We build some fake accounts\nadmin = Account.create(:name => \"DAddYE\", :role => \"admin\", :email => \"d.dagostino@lipsiasoft.com\",\n :password => \"some\", :password_confirmation => \"some\")\neditor = Account.create(:name => \"Dexter\", :role => \"editor\", :email => \"editor@lipsiasoft.com\",\n :password => \"some\", :password_confirmation => \"some\")\n\n%w(News Press HowTo).each do |c|\n admin.add_section(:name => c)\n editor.add_section(:name => c)\nend"
+"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Simulator tests.\n\nExample on how to load different things with the simulators. This example is still in an experimental phase. For\nnow, only Bullet is fully-supported. We are working on the other ones, especially the Mujoco simulator.\n- Bullet: OK\n- Raisim: OK (todo: for collision bodies, it only accepts OBJ files)\n- MuJoCo: OK (todo: control still missing)\n- DART: OK, but capsules don't have collision shapes... (todo: fix some URDFs)\n- VREP: Not implemented yet + problem when importing PyRep with pybullet. Also, need to figure out how to call the\n 'loadURDF' plugin.\n- Isaac: not available yet.\n\"\"\"\n\nimport os\nfrom itertools import count\n\nfrom pyrobolearn.simulators.bullet import Bullet\nfrom pyrobolearn.simulators.raisim import Raisim\nfrom pyrobolearn.simulators.dart import Dart\nfrom pyrobolearn.simulators.mujoco import Mujoco\n# from pyrobolearn.simulators.vrep import VREP # Problem when importing PyRep with Pybullet\n# from pyrobolearn.simulators.isaac import Isaac # Not available yet\n\n\nsim = Bullet(render=True)\n# sim = Raisim(render=True)\n# sim = Dart(render=True)\n# sim = Mujoco(render=True)\n# sim = VREP(render=True)\n# sim = Isaac(render=True)\nprint(\"Gravity: {}\".format(sim.get_gravity()))\n\n# load floor\nfloor = sim.load_floor(dimension=20)\n\n# create box\nbox = sim.create_primitive_object(sim.GEOM_BOX, position=(0, 0, 2), mass=1, rgba_color=(1, 0, 0, 1))\nsphere = sim.create_primitive_object(sim.GEOM_SPHERE, position=(2,"
+"# Getting Started\n\n## Create an AWS account\n\nIn order to complete the hands-on content on this site, you'll need an AWS Account. We strongly recommend that you use a personal account or create a new AWS account to ensure you have the necessary access and that you do not accidentally modify corporate resources. Do **not** use an AWS account from the company you work for unless they provide sandbox accounts just for this purpose.\n\n## Create an IAM user (with admin permissions) \n\nIf you don't already have an AWS IAM user with admin permissions, please use the following instructions to create one:\n\n1. Browse to the AWS IAM console.\n2. Click **Users** on the left navigation and then click **Add User**.\n3. Enter a **User Name**, check the checkbox for **AWS Management Console access**, enter a **Custom Password**, and click **Next:Permissions**.\n4. Click **Attach existing policies directly**, click the checkbox next to the **AdministratorAccess**, and click **Next:review**.\n5. Click **Create User**\n6. Click **Dashboard** on the left navigation and use the **IAM users sign-in link** to login as the admin user you just created.\n\n## Add credits (optional) \n\nIf you are"
+"//:\n// \\file\n// \\author Noah Johnson\n// \\brief Lets CMake define where source and install directories are\n\n// Note: The make system (e.g. CMake) should generate a file - bres_where.h - from\n// this, in which the macro is set correctly.\n// For non-CMake systems this might cause a problem. In particular if there is\n// no brad_where.h, some other stuff might not compile.\n// If we supply a default brad_where.h, it would be changed by CMake, and\n// may get checked back into the repository by accident.\n\n/* #ifndef BRES_LIB_DIR // file guard */\n/* #define BRES_LIB_DIR \"@CMAKE_LIBRARY_OUTPUT_DIRECTORY@\" */\n/* #endif */\n\n#ifndef BRES_SOURCE_DIR // file guard\n#define BRES_SOURCE_DIR \"@VXL_ROOT_SOURCE_DIR@\"\n#endif\n\n#ifndef BRES_INSTALL_DIR // file guard\n#define BRES_INSTALL_DIR \"@CMAKE_INSTALL_FULL_DATAROOTDIR@\"\n#endif"
+"# Introduction to modules\n\nAngular apps are modular and Angular has its own modularity system called *NgModules*.\nNgModules are containers for a cohesive block of code dedicated to an application domain, a workflow, or a closely related set of capabilities. They can contain components, service providers, and other code files whose scope is defined by the containing NgModule. They can import functionality that is exported from other NgModules, and export selected functionality for use by other NgModules.\n\nEvery Angular app has at least one NgModule class, [the *root module*](guide/bootstrapping), which is conventionally named `AppModule` and resides in a file named `app.module.ts`. You launch your app by *bootstrapping* the root NgModule.\n\nWhile a small application might have only one NgModule, most apps have many more *feature modules*. The *root* NgModule for an app is so named because it can include child NgModules in a hierarchy of any depth.\n\n## NgModule metadata\n\nAn NgModule is defined by a class decorated with `@NgModule()`. The `@NgModule()` decorator is a function that takes a single metadata object, whose properties describe the module. The most important properties are as follows.\n\n* `declarations`: The [components](guide/architecture-components), *directives*, and *pipes* that belong to this NgModule.\n\n* `exports`: The subset"
+"#!/usr/bin/env bash\n\n# Creates DeepCpG data files.\n\n\n# Source dependencies.\nsource \"./lib.sh\"\n\n# Set to 1 for testing and 0 for real run.\ntest_mode=1\n# Directory with CpG profiles.\ncpg_dir=\"../data/cpg\"\n# Directory with DNA sequences.\ndna_dir=\"../data/dna/mm10\"\n\n# Create data files.\ncmd=\"dcpg_data.py\n --cpg_profiles $cpg_dir/*.tsv\n --dna_files $dna_dir\n --out_dir $data_dir\n --dna_wlen 1001\n --cpg_wlen 50\n \"\nif [[ $test_mode -eq 1 ]]; then\n cmd=\"$cmd --nb_sample 1000\"\nfi\nrun $cmd\n\n# Compute statistics, e.g. the total number of CpG sites and the mean\n# methylation rate of each cell. Change the input `./data/*` to\n# `./data/c{1,3,5}*.h5` to compute statistics for a subset of the data, which is\n# useful for deciding how to split the data into training, validation, and test\n# set.\ncmd=\"dcpg_data_stats.py $data_dir/* | tee $data_dir.txt\"\nrun $cmd"
+"using System;\nusing System.IO;\nusing System.Linq;\nusing System.Collections.Generic;\nusing System.Text.RegularExpressions;\nusing UnityEditor;\nusing UnityEditor.IMGUI.Controls;\nusing UnityEditor.VersionControl;\nusing UnityEngine;\nusing UObject = UnityEngine.Object;\n\nnamespace UnityEngine.ProBuilder.AssetIdRemapUtility\n{\n sealed class AssetTreeItem : TreeViewItem\n {\n string m_RelativePath;\n string m_FullPath;\n bool m_IsEnabled;\n bool m_IsDirectory;\n bool m_IsMixedState;\n\n public AssetTreeItem(int id, string fullPath, string relativePath) : base(id, 0)\n {\n m_IsDirectory = Directory.Exists(fullPath);\n m_FullPath = fullPath;\n m_RelativePath = relativePath;\n m_IsEnabled = true;\n displayName = m_FullPath.Replace(\"\\\\\", \"/\").Replace(Application.dataPath, \"Assets/\");\n }\n\n public bool enabled\n {\n get { return m_IsEnabled; }\n set { m_IsEnabled = value; }\n }\n\n public bool isDirectory\n {\n get { return m_IsDirectory; }\n set { m_IsDirectory = value; }\n }\n\n public string fullPath\n {\n get { return m_FullPath; }\n }\n\n public string relativePath\n {\n get { return m_RelativePath; }\n }\n\n public bool isMixedState { get { return m_IsMixedState; } }\n\n public void SetEnabled(bool isEnabled)\n {\n enabled = isEnabled;\n\n if (children != null)\n {\n foreach (var child in children)\n {\n AssetTreeItem asset = child as AssetTreeItem;\n\n if (asset != null)\n asset.SetEnabled(isEnabled);\n }\n }\n\n var upstream = parent;\n\n while (upstream != null)\n {\n var up = upstream as AssetTreeItem;\n\n if (up != null && up.children != null)\n {\n AssetTreeItem firstChild = up.children.FirstOrDefault() as AssetTreeItem;\n\n if (firstChild != null)\n {\n up.m_IsMixedState"
+"INCLUDES += -I$(RIOTBASE)/pkg/lvgl/include\nINCLUDES += -I$(PKGDIRBASE)\n\n# Don't use relative includes in lvgl\nCFLAGS += -DLV_CONF_INCLUDE_SIMPLE\n\nifneq (,$(filter lvgl_contrib,$(USEMODULE)))\n DIRS += $(RIOTBASE)/pkg/lvgl/contrib\nendif\n\n# Configuration options\n# Graphical settings\nLVGL_COLOR_DEPTH ?= 16\nLVGL_COLOR_16_SWAP ?= 1\n\n# Memory settings\nLVGL_MEM_SIZE ?= 5U*1024U\n\n# Engine settings\nLVGL_INACTIVITY_PERIOD_MS ?= 5*MS_PER_SEC # 5s\nLVGL_TASK_HANDLER_DELAY_US ?= 5*US_PER_MS # 5ms\nLVGL_TASK_THREAD_PRIO ?= THREAD_PRIORITY_MAIN-1\n\n# Set the CFLAGS variable accordingly\nCFLAGS += -DLV_COLOR_DEPTH=$(LVGL_COLOR_DEPTH)\nCFLAGS += -DLV_COLOR_16_SWAP=$(LVGL_COLOR_16_SWAP)\nCFLAGS += -DLV_MEM_SIZE=$(LVGL_MEM_SIZE)\nCFLAGS += -DLVGL_INACTIVITY_PERIOD_MS=$(LVGL_INACTIVITY_PERIOD_MS)\nCFLAGS += -DLVGL_TASK_HANDLER_DELAY_US=$(LVGL_TASK_HANDLER_DELAY_US)\nCFLAGS += -DLVGL_TASK_THREAD_PRIO=$(LVGL_TASK_THREAD_PRIO)\n\n# lvgl module is not a concrete module, so declare it as a pseudomodule\nPSEUDOMODULES += lvgl\n\n# touch capabilities are available via a pseudomodule\nPSEUDOMODULES += lvgl_contrib_touch"
+"package com.gentics.mesh.core.data.root;\n\nimport com.gentics.mesh.core.data.Branch;\nimport com.gentics.mesh.core.data.branch.HibBranch;\nimport com.gentics.mesh.core.data.project.HibProject;\nimport com.gentics.mesh.core.data.user.HibUser;\nimport com.gentics.mesh.core.rest.branch.BranchReference;\nimport com.gentics.mesh.core.rest.branch.BranchResponse;\nimport com.gentics.mesh.event.EventQueueBatch;\n\n/**\n * Aggregation vertex for Branches.\n */\npublic interface BranchRoot extends RootVertex, TransformableElementRoot {\n\n\tpublic static final String TYPE = \"branches\";\n\n\t/**\n\t * Get the project of this branch root.\n\t * \n\t * @return\n\t */\n\tHibProject getProject();\n\n\t/**\n\t * Create a new branch and make it the latest The new branch will be the initial branch, if it is the first created.\n\t *\n\t * @param name\n\t * branch name\n\t * @param creator\n\t * creator\n\t * @param batch\n\t * @return new Branch\n\t */\n\tdefault Branch create(String name, HibUser creator, EventQueueBatch batch) {\n\t\treturn create(name, creator, null, true, getLatestBranch(), batch);\n\t}\n\n\t/**\n\t * Create a new branch. The new branch will be the initial branch, if it is the first created.\n\t *\n\t * @param name\n\t * branch name\n\t * @param creator\n\t * creator\n\t * @param uuid\n\t * Optional uuid\n\t * @param setLatest\n\t * True to make it the latest branch\n\t * @param baseBranch\n\t * optional base branch. This can only be null if this is the first branch in the project.\n\t * @param batch\n\t * @return new Branch\n\t */\n\tBranch create(String name, HibUser creator, String uuid,"
+"====================\nAnsible project 2.11\n====================\n\nThis release schedule includes dates for the `ansible `_ package, with a few dates for the `ansible-base `_ package as well. All dates are subject to change. See :ref:`base_roadmap_2_11` for the most recent updates on ``ansible-base``.\n\n.. contents::\n :local:\n\nRelease schedule\n=================\n\n.. note:: Dates subject to change.\n\n- ????-??-?? Ansible 2.11 alpha freeze. No net new collections allowed after this date.\n- ????-??-?? Ansible collections freeze date for content moving between collections.\n- ????-??-?? Ansible 2.11 alpha.\n- ????-??-?? Ansible 2.11.0 beta1 and feature freeze.\n\n - No new modules or major features accepted after this date. In practice this means we will freeze the semver collection versions to compatible release versions. For example, if the version of community.crypto on this date was community-crypto-2.1.0; ansible-2.11.0 could ship with community-crypto-2.1.1. It would not ship with community-crypto-2.2.0.\n\n- ????-??-?? Ansible 2.11 final freeze/rc1.\n\n - After this date only changes blocking a release are accepted.\n - Collections will only be updated to a new version if a blocker is approved. Collection owners should discuss any blockers at a community IRC meeting (before this freeze) to decide whether to bump the version of the collection for a fix. See"
+"# Website for Deep Learning Camp Jeju\n\n- [Website for Deep Learning Camp Jeju](#website-for-deep-learning-camp-jeju)\n - [Requirements](#requirements)\n - [Development](#development)\n - [Deployment](#deployment)\n - [Project Structures](#project-structures)\n\n## Requirements\n\n1. Install [bundler](http://bundler.io/)\n2. Run `bundle install`\n\n## Development\n\n```bash\n# Run local server\n$ make serve\n```\n\n## Deployment\n\n- Install ghp-import (`pip install ghp-import`)\n\n```bash\n# it will push everything inside of _site to origin/master_\nmake github\n```\n\n## Project Structures\n\n- `_includes` contains components\n- `_layouts` contains page template\n- `_sass` contains sass\n- `2018` contains actual contents\n\n```bash\ntree -L 1 -I '*.org' .\n```\n\n .\n \u251c\u2500\u2500 2017\n \u251c\u2500\u2500 2018\n \u251c\u2500\u2500 assets\n \u251c\u2500\u2500 _config_dev.yml\n \u251c\u2500\u2500 _config.yml\n \u251c\u2500\u2500 favicon.ico\n \u251c\u2500\u2500 Gemfile\n \u251c\u2500\u2500 Gemfile.lock\n \u251c\u2500\u2500 _includes\n \u251c\u2500\u2500 index.md\n \u251c\u2500\u2500 _layouts\n \u251c\u2500\u2500 Makefile\n \u251c\u2500\u2500 README.md\n \u251c\u2500\u2500 _sass\n \u2514\u2500\u2500 _site\n\n 7 directories, 8 files"
+"The components in this area implement several file decompression\nmechanisms. They provide real-time decompression to permit inspection\n(rules) of the decompressed content.\n\nIn particular the components support these decompression options:\n\n1. Decompress SWF (Adobe Flash) files compressed with the ZLIB algorithm\n\n2. Optionally decompress SWF files compressed with the LZMA algorithm.\n This is only available if Snort ++ is built with the optional LZMA\n support.\n\n3. Decompress the Deflate compressed portions of PDF files.\n\nThe three modes are individually enabled/disabled at initialization time.\n\nAll parsing and decompression is incremental and allows inspection to\nproceed as the file is received and processed.\n\nSWF File Processing:\n\nSWF files exist in three forms: 1) uncompressed, 2) ZLIB compressed, and 3)\nLZMA compressed. SWF files begin with a file signature block (always\nuncompressed) to indicate the format of the balance of the file. The\nbalance of the file is formatted and processed as specified.\n\nPDF files are significantly more complex as the compressed content is\nembedded within the PDF syntax and one file may contain one or many\ncompressed segments.\n\nThus the PDF decompression engine implements a lightweight PDF file parser\nto locate the PDF Stream segments and then attempt to decompress Streams\nthat"
+"module Vine where\r\nimport Rumpus\r\n\r\nstart :: Start\r\nstart = do\r\n\r\n let nMax = 100\r\n let branch 0 = return ()\r\n branch n = do\r\n let hue = fromIntegral n / fromIntegral nMax\r\n [x,y,z,w] <- replicateM 4 (randomRange (0,1))\r\n child <- spawnChild $ do\r\n myShape ==> Cube\r\n myPose ==> positionRotation (V3 0 0.4 0)\r\n (axisAngle (V3 x y z) w)\r\n mySize ==> V3 0.1 0.4 0.1\r\n myColor ==> colorHSL hue 0.8 0.8\r\n myUpdate ==> do\r\n now <- (*0.1) <$> getNow\r\n --setSize (V3 0.1 (sin now) 0.1)\r\n setRotation (V3 x y z) (sin now * w)\r\n lift $ inEntity child $ branch (n - 1)\r\n branch nMax\r\n return ()"
+"// Type definitions for C3js 0.6\n// Project: http://c3js.org/, https://github.com/c3js/c3\n// Definitions by: Marc Climent \n// Gerin Jacob \n// Bernd Hacker \n// Dzmitry Shyndzin \n// Tim Niemueller \n// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped\n// TypeScript Version: 2.3\n\nimport * as d3 from \"d3\";\n\nexport as namespace c3;\n\nexport type PrimitiveArray = Array;\nexport type FormatFunction = (v: any, id: string, i: number, j: number) => void;\n\nexport interface TargetIds {\n ids: ArrayOrString;\n}\n\nexport type ArrayOrString = string[] | string;\n\nexport interface ChartConfiguration {\n /**\n * The CSS selector or the element which the chart will be set to. D3 selection object can be specified. If other chart is set already, it will be replaced with the new one (only one chart\n * can be set in one element).\n * If this option is not specified, the chart will be generated but not be set. Instead, we can access the element by chart.element and set it by ourselves.\n * Note: When chart is not binded, c3 starts observing if chart.element is binded by MutationObserver. In this case, polyfill is required in IE9 and IE10 becuase they do not support\n * MutationObserver. On"
+"# Create custom GCF field types\n\nGCF allows you to register your own custom field types. Defining a field type means defining the `editForm` function of the field (and potentially a `configForm` as well): a higher order component used to edit the field's value.\n\n```js\nconst myCustomFieldType = {\n // Identifier of your field type, a good practice is use a namespace\n name: \"myplugin/field\",\n\n // Label of your field type\n label: \"My Custom Field Type\",\n\n // Function returning a Component used to edit the field value.\n editForm: fieldConfig => ({ value, onChange }) => {\n return (\n \n );\n }\n};\n\nwp.data.dispatch(\"gcf/fields\").register(myCustomFieldType);\n```\n\nLast thing, make sure your script registering your custom fields is loaded in Gutenberg before the editor initialization and in the GCF Admin page before the `gcf-config-app` script."
+"6\n4\n8\n0\n4\n6\n2\n2\n4\n4\n4\n2\n2\n2\n4\n2\n0\n2\n6\n2\n2\n2\n4\n2\n2\n6\n4\n2\n33\n4\n2\n4\n6\n6\n6\n2\n6\n6\n2\n2\n2\n6\n0\n4\n10\n2\n6\n4\n4\n4\n4\n2\n2\n6\n8\n4\n2\n2\n2\n4\n6\n2\n2\n2\n2\n6\n6\n6\n4\n6\n6\n4\n6\n2\n4\n2\n8\n2\n4\n2\n2\n4\n8\n4\n2\n2\n4\n2\n4\n4\n6\n0\n2\n6\n4\n2\n4\n4\n4\n4\n2\n4\n6\n6\n8\n4\n4\n4\n2\n6\n8\n6\n6\n6\n8\n4\n8\n6\n4\n4\n6\n6\n6\n8\n8\n4\n6\n8\n4\n6\n8\n8\n8\n8\n6\n6\n4\n6\n4\n2\n4\n6\n6\n8\n4\n8\n6\n8\n10\n4\n8\n8\n8\n4\n4\n4\n6\n6\n10\n6\n8\n8\n15\n6\n8\n4\n6\n6\n6\n6\n2\n4\n6\n4\n8\n2\n8\n6\n4\n10\n6\n6\n6\n6\n4\n4\n8\n6\n8\n8\n4\n8\n4\n4\n6\n6\n4\n8\n6\n4"
+".. _community_gwc_sqlite:\n\nGWC SQLite Plugin\n=================\n\nThis plugin provides integration with GWC SQLite based blob stores. At the moment only one blob store of this type is available, the MBTiles blob store.\n\n\nMBTiles Blob Store\n++++++++++++++++++\n\nThis blob store allow us to store tiles using the `MBTiles `_ specification (version 1.1) which defines a schema for storing tiles in an `SQLite `_ database with some restrictions regarding tiles formats and projections.\n\nMBTiles specification only supports JPEG and PNG formats and projection EPSG:3857 is assumed. The implemented blob store will read and write MBTiles files compliant with the specification but will also be able to write and read MBTiles files that use others formats and projections.\n\nUsing the MBTiles blob store will bring several benefits at the cost of some performance loss. The MBTiles storage uses a significantly smaller number of files, which results in easier data handling (e.g., backups, moving tiles between environments). In some cases the stored data will be more compact reducing the size of the data on disk.\n\nWhen compared to the file blob store this store has two limitations:\n\n* This store does not integrate with disk quota, this is a consequence of using database files."
+"class Site::Client < Site\n validates_presence_of :url, :callback_url, :secret\n\n has_many :oauth2_tokens,\n foreign_key: 'site_id',\n dependent: :destroy\n\n has_many :authorization_codes,\n foreign_key: 'site_id',\n class_name: 'Oauth2Token::AuthorizationCode'\n\n has_many :access_tokens,\n foreign_key: 'site_id',\n class_name: 'Oauth2Token::AccessToken'\n\n has_many :refresh_tokens,\n foreign_key: 'site_id',\n class_name: 'Oauth2Token::RefreshToken'\n\n before_validation :set_secret,\n on: :create\n\n after_create :set_manager\n\n scope :managed_by, lambda { |actor|\n select(\"DISTINCT sites.*\").\n joins(actor: :sent_permissions).\n merge(Contact.received_by(actor)).\n merge(Permission.where(action: 'manage', object: nil))\n }\n\n %w{ url callback_url secret }.each do |m|\n define_method m do\n config[m]\n end\n\n define_method \"#{ m }=\" do |arg|\n config[m] = arg\n end\n end\n\n # Generate a new OAuth secret for this site client\n def refresh_secret!\n set_secret\n save!\n end\n\n private\n\n def set_secret\n self.secret = SecureRandom.hex(64)\n end\n\n def set_manager\n c = sent_contacts.create! receiver_id: author.id,\n user_author: author\n\n c.relation_ids = [ ::Relation::Manager.instance.id ]\n end\nend"
+"#pragma once\n\n#include \n#include \n#include \n\nnamespace nall {\n auto main(Arguments arguments) -> void;\n\n auto main(int argc, char** argv) -> int {\n #if defined(PLATFORM_WINDOWS)\n CoInitialize(0);\n WSAData wsaData{0};\n WSAStartup(MAKEWORD(2, 2), &wsaData);\n _setmode(_fileno(stdin ), O_BINARY);\n _setmode(_fileno(stdout), O_BINARY);\n _setmode(_fileno(stderr), O_BINARY);\n #endif\n\n main(move(Arguments{argc, argv}));\n\n //when a program is running, input on the terminal queues in stdin\n //when terminating the program, the shell proceeds to try and execute all stdin data\n //this is annoying behavior: this code tries to minimize the impact as much as it can\n //we can flush all of stdin up to the last line feed, preventing spurious commands from executing\n //however, even with setvbuf(_IONBF), we can't stop the last line from echoing to the terminal\n #if !defined(PLATFORM_WINDOWS)\n auto flags = fcntl(fileno(stdin), F_GETFL, 0);\n fcntl(fileno(stdin), F_SETFL, flags | O_NONBLOCK); //don't allow read() to block when empty\n char buffer[4096], data = false;\n while(read(fileno(stdin), buffer, sizeof(buffer)) > 0) data = true;\n fcntl(fileno(stdin), F_SETFL, flags); //restore original flags for the terminal\n if(data) putchar('\\r'); //ensures PS1 is printed at the start of the line\n #endif\n\n return EXIT_SUCCESS;\n }\n}\n\nauto main(int argc, char** argv) -> int {\n return nall::main(argc, argv);\n}"
+"import React, { useState } from \"react\";\nimport { useHotkeys } from \"react-hotkeys-hook\";\n\nimport SignUpModal from \"./SignUpModal\";\nimport SignUpCSS from \"./CSS\";\n\nimport SignUpImage from \"./SignUpImage\";\nimport SignUpForm from \"./SignUpForm\";\n\nexport default function SignUp() {\n\tconst [modalIsOpen, setModal] = useState(false);\n\tconst [data, setData] = useState({});\n\tconst {\n\t\tnome,\n\t\temail,\n\t\tcpf,\n\t\ttelefone,\n\t} = data;\n\n\t// Either click shadow or use esc when the main component is in focus to close the modal\n\t// Follow the example of https://johannesklauss.github.io/react-hotkeys-hook/docs-use-hotkeys#example\n\t// eslint-disable-next-line consistent-return\n\tuseHotkeys(\"esc\", () => setModal((prevModalIsOpen) => {\n\t\tif (prevModalIsOpen) {\n\t\t\treturn false;\n\t\t}\n\t}));\n\n\treturn (\n\t\t\n\t\t\t\n\t\t\t\t\n\t\t\t\n\t\t\t\n\t\t\t\t\n\t\t\t\t{/* Use redirect etc later */}\n\t\t\t\t\n\t\t\t\n\t\t\n\t);\n}"
+"# frozen_string_literal: true\n\nRSpec.describe RuboCop::Cop::Lint::IneffectiveAccessModifier do\n subject(:cop) { described_class.new }\n\n context 'when `private` is applied to a class method' do\n it 'registers an offense' do\n expect_offense(<<~RUBY)\n class C\n private\n\n def self.method\n ^^^ `private` (on line 2) does not make singleton methods private. Use `private_class_method` or `private` inside a `class << self` block instead.\n puts \"hi\"\n end\n end\n RUBY\n end\n end\n\n context 'when `protected` is applied to a class method' do\n it 'registers an offense' do\n expect_offense(<<~RUBY)\n class C\n protected\n\n def self.method\n ^^^ `protected` (on line 2) does not make singleton methods protected. Use `protected` inside a `class << self` block instead.\n puts \"hi\"\n end\n end\n RUBY\n end\n end\n\n context 'when `private_class_method` is used' do\n context 'when `private_class_method` contains all private method names' do\n it \"doesn't register an offense\" do\n expect_no_offenses(<<~RUBY)\n class C\n private\n\n def self.method\n puts \"hi\"\n end\n\n private_class_method :method\n end\n RUBY\n end\n end\n\n context 'when `private_class_method` does not contain the method' do\n it 'registers an offense' do\n expect_offense(<<~RUBY)\n class C\n private\n\n def self.method2\n ^^^ `private` (on line 2) does not make singleton methods private. Use `private_class_method` or `private` inside a `class << self` block instead.\n puts \"hi\"\n end\n\n private_class_method :method\n end\n RUBY\n end\n end\n end\n\n context 'when no access"
+"module Explorer.View.NotFound (notFoundView) where\n\nimport Prelude\n\nimport Data.Lens ((^.))\n\nimport Explorer.Lenses.State (lang)\nimport Explorer.Routes (Route(Dashboard), toUrl)\nimport Explorer.Types.Actions (Action(..))\nimport Explorer.Types.State (State)\n\nimport Pux.DOM.Events (onClick) as P\n\nimport Text.Smolder.HTML (div, a) as S\nimport Text.Smolder.HTML.Attributes (className, href) as S\nimport Text.Smolder.Markup ((!), (#!))\nimport Text.Smolder.Markup (text) as S\n\nimport Pux.DOM.HTML (HTML) as P\n\nnotFoundView :: State -> P.HTML Action\nnotFoundView state =\n let lang' = state ^. lang in\n S.div ! S.className \"explorer-404\"\n $ S.div ! S.className \"explorer-404__wrapper\"\n $ S.div ! S.className \"explorer-404__container\"\n $ S.a ! S.href (toUrl Dashboard)\n #! P.onClick (Navigate (toUrl Dashboard))\n ! S.className \"bg-image-404\"\n $ S.text \"\""
+"#!/usr/bin/env node\n\nimport commander from \"commander\";\nimport loglevel from \"loglevel\";\n\nimport { changelogs } from \"./changelogs\";\nimport { clean } from \"./clean\";\nimport { configs } from \"./configs\";\nimport { CLEAN, DEBUG, SILENT } from \"./constants\";\nimport { indexer } from \"./indexer\";\nimport { libsize } from \"./libsize\";\nimport { release, RELEASE_TYPES, toReleaseType } from \"./release\";\nimport { sandbox } from \"./sandbox\";\nimport { sassdoc } from \"./sassdoc\";\nimport { shared } from \"./shared\";\nimport { themes } from \"./themes\";\nimport { umd } from \"./umd\";\nimport { copyStyles } from \"./utils\";\nimport { variables } from \"./variables\";\nimport { watch } from \"./watch\";\n\nconst argv = process.argv.slice(2);\n\nif (argv.includes(DEBUG)) {\n loglevel.setLevel(\"debug\");\n} else if (argv.includes(SILENT)) {\n loglevel.setLevel(\"error\");\n} else {\n loglevel.setLevel(\"info\");\n}\n\nconst createCommand = (\n command: string,\n cleanable: boolean = false\n): commander.Command => {\n const instance = commander\n .command(command)\n .option(DEBUG, \"Enables the verbose logging to help debug errors.\")\n .option(SILENT, \"Disables all logging.\");\n\n if (cleanable) {\n return instance.option(\n CLEAN,\n \"Removes the existing files before executing.\"\n );\n }\n\n return instance;\n};\n\ncreateCommand(\"clean\")\n .description(\"Cleans all the distributables for all publishable packages.\")\n .action(() => clean());\n\ncreateCommand(\"styles\")\n .description(\n \"Copies all the SCSS files into the dist folder as well as creating non-webpack"
+"#include \"GlobalInclude.hlsli\"\n\nTexture2D flowmap : register(t1);\nTexture2D verticalFilter1 : register(t2);\nTexture2D obstacle : register(t7);\n\nSamplerState wrapSampler : register(s0);\nSamplerState clampSampler : register(s1);\n\n[domain(\"quad\")]\nDS_OUTPUT main(\n\tHS_CONSTANT_DATA_OUTPUT input,\n\tfloat2 domain : SV_DomainLocation, // float2 for quad\n\tconst OutputPatch patch)\n{\n DS_OUTPUT Output;\n \n float3 pos = BLERP3(patch[0].pos, patch[1].pos, patch[3].pos, patch[2].pos, domain);\n\t\n float2 texCoord = BLERP2(patch[0].texCoord, patch[1].texCoord, patch[3].texCoord, patch[2].texCoord, domain);\n //float3 nor = float3(0, 1, 0); //PER VERTEX NORMAL\n\n if(mode==0||mode==10||mode==11)//0 - default, 10 - normal\n {\n float ob = obstacle.SampleLevel(clampSampler, texCoord, 0).x;\n if (ob > obstacleThresholdWave)\n {\n //do nothing\n }\n else\n {\n float4 deviation = Flow(texCoord, time * timeScale * flowSpeed, flowmap, wrapSampler, verticalFilter1, wrapSampler);\n //float4 deviation = FlowHeightWithNormal(texCoord, time * timeScale * flowSpeed, t1, s2, t2, s2, nor); //PER VERTEX NORMAL\n pos.y += deviation.y;\n pos.x += deviation.x;\n pos.z += deviation.z;\n }\n }\n\t\n Output.pos = mul(mul(viewProj, model), float4(pos, 1));\n Output.texCoord = texCoord;\n\tOutput.PosW = pos;\n //Output.nor = nor; //PER VERTEX NORMAL\n\n return Output;\n}"
+"---\ntitle: Using Microsoft Cloud App Security controls in Power BI\ndescription: Learn how to use Microsoft Cloud App Security together with Power BI\nauthor: paulinbar\nms.reviewer: ''\n\nms.service: powerbi\nms.subservice: powerbi-eim\nms.topic: how-to\nms.date: 06/15/2020\nms.author: painbar\n\nLocalizationGroup: Data from files\n---\n# Using Microsoft Cloud App Security controls in Power BI\n\nUsing Cloud App Security with Power BI, you can help protect your Power BI reports, data, and services from unintended leaks or breaches. With Cloud App Security, you create conditional access policies for your organization\u2019s data, using real-time session controls in Azure Active Directory (Azure AD), that help to ensure your Power BI analytics are secure. Once these policies have been set, administrators can monitor user access and activity, perform real-time risk analysis, and set label-specific controls. \n\n\n\nYou can configure Cloud App Security for all sorts of apps and services, not only Power BI. You\u2019ll need to configure Cloud App Security to work with Power BI to benefit from Cloud App Security protections for your Power BI data and analytics. For more information about Cloud App Security, including an overview of how it works, the dashboard, and app risk scores, see"
+"structure Statistics : STATISTICS = struct\n\n type stat = {name: string, enabled: bool ref, table: (string, int ref) Util.alist ref}\n\n fun new s r = {name=s,enabled=r,table=ref (Util.emptyAlist())}\n\n fun incr {name,table,enabled} s = \n if !enabled then\n case Util.lookupAlist (!table) s of\n SOME r => r := !r + 1\n | NONE => table := Util.extendAlist (!table) (s,ref 1)\n else ()\n\n fun report {name,enabled,table} =\n if !enabled then\n let val sz = List.foldl (fn ((s,_),a) => (Int.max(size s,a))) 0 (!table)\n val () = Util.prln (\"[\" ^ name ^ \" Statistics Report]\")\n val () = List.app (fn (s,r) => \n let val line = StringCvt.padRight #\" \" sz s ^ \" -> \" ^ Int.toString(!r)\n in Util.prln (\" \" ^ line)\n end) (!table)\n in ()\n end\n else ()\nend"
+"---\nlayout: article_eclipse\npart: Reference\n---\n\n# Editor\n\nThe Erlang editor provides specialized features for editing Erlang related\nfiles.\n\n{: .frame }\n\nAssociated with the editor is an Erlang-specific outline view, which\nshows the structure of the active `.erl` or `.hrl` file. It is updated as you\nedit these files.\n\nThe editor includes the following features:\n\n* Syntax highlighting\n* Content/code assist; auto completion of function calls, display of\nfunction documentation)\n\nThe most common way to invoke the Erlang editor is to open a file from the\nErlang Navigator. If you want to open an Erlang module by name, the keyboard\nshortcut is **Ctrl+Shift+M** or **Ctrl+Alt+Shift+M**."
+"//\n// GooeyEffect.swift\n// gooey-cell\n//\n// Created by \u041f\u0440\u0435\u0433\u0435\u0440 \u0413\u043b\u0435\u0431 on 22/01/2019.\n// Copyright \u00a9 2019 Cuberto. All rights reserved.\n//\n\nimport UIKit\nimport pop\n\npublic class GooeyEffect {\n public enum Direction {\n case toRight, toLeft\n }\n \n public struct Config {\n let color: UIColor?\n let image: UIImage?\n \n public init(color: UIColor?, image: UIImage?) {\n self.color = color\n self.image = image\n }\n }\n \n private struct SnapshotLayerInfo {\n let position: CGPoint\n let opacity: Float\n }\n\n private struct CircleLayerInfo {\n let centerPoint: CGPoint\n let radius: CGFloat\n let leftPoint: CGPoint\n let rightPoint: CGPoint\n let path: CGPath?\n }\n \n private struct EdgeLayerInfo {\n let topPoint: CGPoint\n let topControlPoint: CGPoint\n let bottomPoint: CGPoint\n let bottomControlPoint: CGPoint\n let rightControlPoint: CGPoint\n let path: CGPath?\n }\n \n private struct JointLayerInfo {\n let path: CGPath?\n }\n \n private struct ImageLayerInfo {\n let position: CGPoint\n let opacity: Float\n let transform: CATransform3D\n }\n \n let direction: Direction\n let effectMaxWidth: CGFloat = 170\n let gapProgressValue: Float = 0.7\n \n private let edgeShapeWidthRate: CGFloat = 0.35\n private let effectMaxHeight: CGFloat = 150\n private let circleShapeRadius: CGFloat = 20\n private let jointShapeConstringencyRate: Float = 2\n private let fullAnimationDuration: CFTimeInterval = 0.35\n private let color: UIColor\n private let buttonImage: UIImage?\n\n private weak var container: UIView?\n private let effectCenterY: CGFloat\n private let"
+"using System;\nusing System.Text;\nusing System.Windows.Forms;\n\nnamespace IronAHK.Rusty\n{\n partial class Core\n {\n class HotkeyBox : TextBox\n {\n Keys key, mod;\n Limits limit;\n\n [Flags]\n public enum Limits\n {\n None = 0,\n PreventUnmodified = 1,\n PreventShiftOnly = 2,\n PreventControlOnly = 4,\n PreventAltOnly = 8,\n PreventShiftControl = 16,\n PreventShiftAlt = 32,\n PreventShiftControlAlt = 128,\n }\n\n public HotkeyBox()\n {\n key = mod = Keys.None;\n limit = Limits.None;\n Multiline = false;\n ContextMenu = new ContextMenu();\n Text = Enum.GetName(typeof(Keys), key);\n\n KeyPress += delegate(object sender, KeyPressEventArgs e)\n {\n e.Handled = true;\n };\n\n KeyUp += delegate(object sender, KeyEventArgs e)\n {\n if (e.KeyCode == Keys.None && e.Modifiers == Keys.None)\n key = Keys.None;\n };\n\n KeyDown += delegate(object sender, KeyEventArgs e)\n {\n if (e.KeyCode == Keys.Back || e.KeyCode == Keys.Delete)\n key = mod = Keys.None;\n else\n {\n key = e.KeyCode;\n mod = e.Modifiers;\n Validate();\n }\n\n SetText();\n };\n }\n\n public Limits Limit\n {\n get { return limit; }\n set { limit = value; }\n }\n\n void Validate()\n {\n Keys[,] sym = { { Keys.Control, Keys.ControlKey }, { Keys.Shift, Keys.ShiftKey }, { Keys.Alt, Keys.Menu } };\n\n for (int i = 0; i < 3; i++)\n {\n if (key == sym[i, 1] && (mod & sym[i, 0]) == sym[i, 0])\n mod &= ~sym[i,"
+"# tests for tickets below 1000 and for tests without a ticket reference\n0xxx\n\n# tests for tickets in range [1000, 2000[\n1xxx\n\n# tests for tickets in range [2000, 3000[\n2xxx\n\n# tests for tickets in range [3000, 4000[\n3xxx\n\n# tests for tickets in range [4000, 5000[\n4xxx\n\n# collision of long vehicles\n# collision_long_veh (automatically commented due to no test directory being found)\n\n# ticket672 (automatically commented due to no test directory being found)\n\n# ticket777 (automatically commented due to no test directory being found)\n\n# ticket1047 (automatically commented due to no test directory being found)\n\n5xxx\n6xxx\n7xxx"
+"require 'diffy'\n\nmodule Refinery\n module Pages\n # Knows how to build the html for a section. A section is part of the visible html, that has\n # content wrapped in some particular markup. Construct with the relevant options, and then\n # call wrapped_html to get the resultant html.\n #\n # The content rendered will usually be the value of fallback_html, unless an override_html\n # is specified. However, on rendering, you can elect not display sections that have no\n # override_html by passing in false for can_use_fallback.\n #\n # Sections may be hidden, in which case they wont display at all.\n class SectionPresenter\n include ActionView::Helpers::TagHelper\n include ActionView::Helpers::SanitizeHelper\n\n def initialize(initial_hash = {})\n { logger: Rails.logger }.merge(initial_hash).map do |key, value|\n send(\"#{key}=\", value)\n end\n end\n\n attr_reader :id, :fallback_html, :hidden\n alias_method :hidden?, :hidden\n attr_accessor :override_html\n\n def visible?\n !hidden?\n end\n\n def has_content?(can_use_fallback = true)\n visible? && content_html(can_use_fallback).present?\n end\n\n def wrapped_html(can_use_fallback = true)\n return if hidden?\n\n content = content_html(can_use_fallback)\n if content.present?\n wrap_content_in_tag(content)\n end\n end\n\n def hide\n self.hidden = true\n end\n\n def not_present_css_class\n \"no_#{id}\"\n end\n\n protected\n\n def content_html(can_use_fallback)\n override_html.presence || html_from_fallback(can_use_fallback)\n end\n\n def html_from_fallback(can_use_fallback)\n fallback_html.presence if can_use_fallback\n end\n\n private\n\n attr_accessor :logger\n attr_writer :id, :fallback_html, :hidden\n\n def wrap_content_in_tag(content)\n content_tag(:section, content_tag(:div, sanitize_content(content), :class => 'inner'), :id => id)\n end\n\n def"
+"\ufeff--random sample from msdn library: http://msdn.microsoft.com/en-us/library/bb630263.aspx\r\nwith paths (\r\n\tpath\r\n\t,EmployeeID\r\n\t)\r\nas (\r\n\t-- This section provides the value for the root of the hierarchy\r\n\tselect hierarchyid::GetRoot() as OrgNode\r\n\t\t,EmployeeID\r\n\tfrom #Children as C\r\n\twhere ManagerID is null\r\n\t\r\n\tunion all\r\n\t\r\n\t-- This section provides values for all nodes except the root\r\n\tselect CAST(p.path.ToString() + CAST(C.Num as varchar(30)) + '/' as hierarchyid)\r\n\t\t,C.EmployeeID\r\n\tfrom #Children as C\r\n\tjoin paths as p on C.ManagerID = P.EmployeeID\r\n\t)\r\ninsert NewOrg (\r\n\tOrgNode\r\n\t,O.EmployeeID\r\n\t,O.LoginID\r\n\t,O.ManagerID\r\n\t)\r\nselect P.path\r\n\t,O.EmployeeID\r\n\t,O.LoginID\r\n\t,O.ManagerID\r\nfrom EmployeeDemo as O\r\njoin Paths as P on O.EmployeeID = P.EmployeeID\r\ngo\r\n\r\n--similar sample, with 2 CTEs in the same query\r\nbegin\r\n\twith FirstCTE\r\n\tas (\r\n\t\tselect 1 as FirstColumn\r\n\t\t)\r\n\t\t,SecondCTE (AnotherColumn)\r\n\tas (\r\n\t\tselect 2\r\n\t\t)\r\n\tselect *\r\n\tfrom FirstCTE\r\n\t\r\n\tunion\r\n\t\r\n\tselect *\r\n\tfrom SecondCTE\r\nend\r\ngo"
+"/*\nPackage merkleTree is a generic Merkle Tree implementation, for provably publishing lots\nof data under one succinct tree root.\n\nInstall:\n\n go get github.com/keybase/go-merkle-tree\n\nDesign:\n\nThis package outputs a MerkleTree with two types of nodes: interior index\nnodes, or iNodes, and exterior data nodes, of Leaf nodes. The inodes\nconsist of tables that map prefixes to child pointers. The leafs map a full\nhash to a \"value\".\n\nThis is best demonstrated with a simple example. Let's say you are storing\nthe key-value pair (`0123456789abcdef`, {\"name\" : \"max\"}) in the Merkle tree.\nLet's say that the shape of the tree is to have 256 children per inode.\nThen this key-value pair might be stored under the path\n\n\tat root node: 01 \u2192 aabbccdd\n\tat aabbccdd node: 23 \u2192 eeff5588\n\tat eeff5588 node: 34 \u2192 99331122\n\tat 99331122 node: 0123456789abcdef \u2192 {\"name\" : \"max\" }\n\nMeaning at the root node, we take the first 256-bits of the needed\nkey to get a prefix `01`, and look that up in the node's pointer table\nto get a child pointer, which is `aabbccdd`. This is a hash of an\niNode, which we can fetch from storage, verify it matches the hash,\nand then recursively"
+"Data Protection\n===============\n\nNowadays, one of the most important things in security is data protection. You\ndon't want something like:\n\n\n\nIn a nutshell, data from your web application needs to be protected,. Therefore,\nin this section we will take a look at the different ways to secure it.\n\nOne of the first things you should take care of is creating and implementing the\nright privileges for each user and restrict them to strictly the functions they\nreally need.\n\nFor example, consider a simple online store with the following user roles:\n\n* _Sales user_: Permission only to view catalog\n* _Marketing user_: Allowed to check statistics\n* _Developer_: Allowed to modify pages and web application options\n\nAlso, in the system configuration (aka webserver), you should define the right\npermissions.\n\nThe main thing is to define the right role for each user - web or system.\n\nRole separation and access controls are further discussed in the [Access\nControl][1] section.\n\n## Remove Sensitive Information\n\nTemporary and cache files containing sensitive information should be removed\nas soon as they're not needed. If you still need some of them, move them to\nprotected areas and/or encrypt them. This"
+"# Colourama\n\nColourama used typosquatting to register a package that had similar name to\nColorama, one of is one of the top 20 most downloaded legitimate modules\nin the PyPI registry with 1 million downloads on a daily basis. The colourama\npackage contains a malware which targets Windows machines to implement a\ncryptocurrency clipboard hijacker. As a result, was able to divert any\nBitcoin payment from victim machines to the attacker's bitcoin address.\n\n## Impact\n\nColourama was registered early in December 2017. It is not clear how many times\nthe malicious package have been downlaoded since then. According to a report by\nMedium, it was downloaded 55 times in October 2018.\n\n## Type of compromise\n\nA typosquat attack does not require compromising any type of infrastructure."
+"import { useMemo, useRef } from 'react'\nimport { useLayoutEffect } from 'react-layout-effect'\nimport { Lookup } from '@react-spring/types'\nimport {\n is,\n each,\n usePrev,\n useOnce,\n useForceUpdate,\n} from '@react-spring/shared'\n\nimport {\n ControllerFlushFn,\n ControllerUpdate,\n PickAnimated,\n SpringValues,\n} from '../types'\nimport { UseSpringProps } from './useSpring'\nimport { declareUpdate } from '../SpringValue'\nimport {\n Controller,\n getSprings,\n flushUpdateQueue,\n setSprings,\n} from '../Controller'\nimport { hasProps, detachRefs, replaceRef } from '../helpers'\nimport { useSpringContext } from '../SpringContext'\nimport { SpringRef } from '../SpringRef'\n\nexport type UseSpringsProps = unknown &\n ControllerUpdate & {\n ref?: SpringRef\n }\n\n/**\n * When the `deps` argument exists, the `props` function is called whenever\n * the `deps` change on re-render.\n *\n * Without the `deps` argument, the `props` function is only called once.\n */\nexport function useSprings(\n length: number,\n props: (i: number, ctrl: Controller) => Props,\n deps?: readonly any[]\n): PickAnimated extends infer State\n ? [SpringValues[], SpringRef]\n : never\n\n/**\n * Animations are updated on re-render.\n */\nexport function useSprings(\n length: number,\n props: Props[] & UseSpringsProps>[]\n): SpringValues>[]\n\n/**\n * When the `deps` argument exists, you get the `update` and `stop` function.\n */\nexport function useSprings(\n length: number,\n props: Props[]"
+"package Selenium::Remote::Finders;\n$Selenium::Remote::Finders::VERSION = '1.36';\nuse strict;\nuse warnings;\n\n# ABSTRACT: Handle construction of generic parameter finders\nuse Try::Tiny;\nuse Carp qw/carp/;\nuse Moo::Role;\nuse namespace::clean;\n\n\nsub _build_find_by {\n my ( $self, $by ) = @_;\n\n return sub {\n my ( $driver, $locator ) = @_;\n my $strategy = $by;\n\n return try {\n return $driver->find_element( $locator, $strategy );\n }\n catch {\n carp $_;\n return 0;\n };\n }\n}\n\n1;\n\n__END__\n\n=pod\n\n=encoding UTF-8\n\n=head1 NAME\n\nSelenium::Remote::Finders - Handle construction of generic parameter finders\n\n=head1 VERSION\n\nversion 1.36\n\n=head1 DESCRIPTION\n\nThis package just takes care of setting up parameter finders - that\nis, the C versions of the find element\nfunctions. You probably don't need to do anything with this package;\ninstead, see L documentation\nfor the specific finder functions.\n\n=head1 SEE ALSO\n\nPlease see those modules/websites for more information related to this module.\n\n=over 4\n\n=item *\n\nL\n\n=back\n\n=head1 BUGS\n\nPlease report any bugs or feature requests on the bugtracker website\nL\n\nWhen submitting a bug or request, please include a test-file or a\npatch to an existing test-file that illustrates the bug or desired\nfeature.\n\n=head1 AUTHORS\n\nCurrent Maintainers:\n\n=over 4\n\n=item *\n\nDaniel Gempesaw \n\n=item *"
+"/* ERASE for arrays to free memory\n\n fb_ArrayErase() is called for dynamic arrays and static arrays\n\n if it is known at compile time that the array is static (fixed length)\n then:\n for plain arrays: fbc calls fb_ArrayClear()\n for object arrays: fbc calls fb_ArrayClearObj()\n for FBSTRING arrays: fbc calls fb_ArrayDestructStr()\n\n Otherwise if the array is dynamic or is unknown to be static at \n compile-time, then:\n for plain arrays: fbc calls fb_ArrayErase()\n for object arrays: fbc calls fb_ArrayEraseObj()\n for FBSTRING arrays: fbc calls fb_ArrayStrErase()\n\n fb_ArrayErase() is also called indirectly from rtlib to free the\n memory associated with the array.\n*/\n\n#include \"fb.h\"\n\nFBCALL int fb_ArrayErase( FBARRAY *array )\n{\n\t/* ptr can be NULL, for global dynamic arrays that were never allocated,\n\t but will still be destroyed on program exit */\n\tif( array->ptr ) {\n\n\t\t/* fixed length? then it can't be resized.\n\t\t just clear the elements and leave the descriptor as-is. */\n\t\tif( array->flags & FBARRAY_FLAGS_FIXED_LEN ) {\n\t\t\tfb_ArrayClear( array );\n\n\t\t/* otherwise it's dynamic: free memory */\n\t\t} else {\n\t\t\tfree( array->ptr );\n\t\t\tfb_ArrayResetDesc( array );\n\t\t}\n\t}\n\n\treturn fb_ErrorSetNum( FB_RTERROR_OK );\n}"
+"// Copyright (c) 2020 TypeFox GmbH. All rights reserved.\n// Licensed under the GNU Affero General Public License (AGPL).\n// See License-AGPL.txt in the project root for license information.\n\npackage dropwriter\n\nimport (\n\t\"io\"\n\t\"sync\"\n\t\"time\"\n)\n\n// Clock abstracts time for the bucket limiter\ntype Clock func() time.Time\n\n// NewBucket creates a new bucket limiter with a realtime clock\nfunc NewBucket(capacity, refillRatePerSec int64) *Bucket {\n\treturn NewBucketClock(capacity, refillRatePerSec, time.Now)\n}\n\n// NewBucketClock produces a new bucket limiter with a custom clock. Useful for testing.\nfunc NewBucketClock(capacity, refillRatePerSec int64, clock Clock) *Bucket {\n\treturn &Bucket{\n\t\tclock: clock,\n\t\tcapacity: capacity,\n\t\trefillRate: refillRatePerSec,\n\t}\n}\n\n// Bucket implements a token bucket limiter\ntype Bucket struct {\n\tclock Clock\n\n\t// capacity is the total token capacity of this bucket\n\tcapacity int64\n\n\t// refillRate holds how many tokens we refill per second\n\trefillRate int64\n\n\t// mu syncs bucket access\n\tmu sync.Mutex\n\n\t// availableTokens is the total number of tokens currently available\n\tavailableTokens int64\n\n\t// lastTick is the last time we adjusted the available token count\n\tlastTick time.Time\n}\n\nfunc (b *Bucket) adjustTokens() {\n\tb.mu.Lock()\n\tdefer b.mu.Unlock()\n\n\tnow := b.clock()\n\tdefer func() {\n\t\tb.lastTick = now\n\t}()\n\n\tif b.lastTick.IsZero() {\n\t\t// first adjustment/tick ever -"
+"/*\n * Copyright 2020, Verizon Media.\n * Licensed under the terms of the Apache 2.0 license.\n * Please see LICENSE file in the project root for terms.\n */\n\npackage com.yahoo.oak;\n\nimport org.junit.Test;\n\n\npublic class MemoryReleaseTest {\n\n @Test(timeout = 300_000)\n public void testByteBuffersReleased() {\n// System.gc();\n// String val = String.format(\"-%016000d\", 0);\n//\n// OakMapBuilder builder = new OakMapBuilder()\n// .setChunkMaxItems(1024)\n// .setChunkBytesPerItem(4096)\n// .setKeySerializer(new StringSerializer())\n// .setValueSerializer(new StringSerializer())\n// .setComparator(new StringComparator())\n// .setMinKey(\"\");\n// OakMap oak = builder.build();\n//\n// int firstIteration = 0;\n// try {\n// while (true) {\n// String key = String.format(\"-%01024d\", firstIteration++);\n// oak.put(key, val);\n// }\n// } catch (OutOfMemoryError e) {\n//\n// }\n//\n// oak.close();\n//\n// int secondIteration = 0;\n// oak = builder.build();\n// System.gc();\n//\n// try {\n// while (true) {\n// String key = String.format(\"-%01024d\", secondIteration++);\n// oak.put(key, val);\n// }\n// } catch (OutOfMemoryError e) {\n//\n// }\n// assert(firstIteration <= secondIteration);\n// oak.close();\n// System.gc();\n }\n\n}"
+"'use strict';\n\nangular.module('composeUiApp')\n .factory('logService', function () {\n\n function omitTimestamp(f) {\n\n var colors = [];\n\n return _.map(f, function (item) {\n\n var id = item.container;\n\n if (colors.indexOf(id) < 0) {\n colors.push(id);\n }\n\n return {\n text: item.text.split(' ').splice(1).join(' '),\n container: id,\n color: colors.indexOf(id)\n };\n });\n }\n\n function sortByDate(data) {\n return data.sort(function (a,b) {\n if(a.text < b.text) {\n return -1;\n } else if(a.text > b.text) {\n return 1;\n } else {\n return 0;\n }\n });\n }\n\n function excludeBlankLines(lines) {\n return _.filter(lines, function (line) {\n return line.text.trim().length > 0;\n });\n }\n\n function addContainerInfo(combinedLogs) {\n return _.map(combinedLogs, function (lines, containerId) {\n return _.map(lines, function (line) {\n return {\n text: line,\n container: containerId\n };\n });\n });\n }\n\n var formatLogs = _.flowRight(omitTimestamp,\n sortByDate,\n excludeBlankLines,\n _.flatten,\n addContainerInfo);\n\n return {\n formatLogs: formatLogs\n };\n });"
+"class AccessPermission < ActiveRecord::Base\n belongs_to :swarm\n belongs_to :user\n belongs_to :creator, :class_name => 'User', :foreign_key => 'creator_id'\n\n before_save :downcase_email\n\n validates :email, uniqueness: { scope: :swarm,\n message: \"can only be given permission on a swarm once\" }\n validates :user, uniqueness: { scope: :swarm,\n message: \"can only be given permission on a swarm once\" }, if: :user\n\n def self.update_legacy_permissions_for(user)\n aps = AccessPermission.where(email: user.email)\n aps.each do |ap|\n ap.user = user\n ap.save\n end\n end\n\n def self.can_alter?(swarm, user)\n if user\n user.is_admin? || swarm.users.include?(user) || swarm.access_permissions.find_by(email: user.email)\n end\n end\n\n def self.can_destroy?(swarm,user)\n if user\n user.is_admin? || swarm.owners.include?(user)\n end\n end\n\n def self.can_alter_permissions?(swarm,user)\n if user\n user.is_admin? || swarm.owners.include?(user)\n end\n end\n\n def self.can_see_user_drafts?(current_user, user)\n current_user && ((current_user == user) || current_user.is_admin?)\n end\n private\n\n def downcase_email\n self.email = self.email.downcase\n end\n\nend"
+"# Packers\nPacked programs have often been obfuscated to hide their logic. Since capa cannot handle obfuscation well, results may be misleading or incomplete. If possible, users should unpack input files before analyzing them with capa.\n\nIf capa detects that a program may be packed using its rules it warns the user.\n\n\n# Installers, run-time programs, etc.\ncapa cannot handle installers, run-time programs like .NET applications, or other packaged applications like AutoIt well. This means that the results may be misleading or incomplete.\n\nIf capa detects an installer, run-time program, etc. it warns the user.\n\n\n# Wrapper functions and matches in child functions\nCurrently capa does not handle wrapper functions or other matches in child functions.\n\nConsider this example call tree where `f1` calls a wrapper function `f2` and the `CreateProcess` API. `f2` writes to a file.\n\n```\nf1\n f2 (WriteFile wrapper)\n CreateFile\n WriteFile\n CreateProcess\n```\n\nHere capa does not match a rule that hits on file creation and execution on function `f1`. \n\nSoftware often contains such nested calls because programmers wrap API calls in helper functions or because specific compilers or languages, such as Go, layer calls.\n\nWhile a feature to capture nested functionality is desirable it introduces various"
+"int read(int, unsigned char *, int);\nint write(int, unsigned char *, int);\n\nstatic unsigned isdig[256];\n\nint main()\n{\n\tstatic unsigned char inbuf[4*1048576], outbuf[4*1048576];\n\tregister unsigned char *inp, *outp;\n\tregister unsigned i;\n\n\tfor (i = '0'; i <= '9'; i++) isdig[i] = 1;\n\n\tread(0, inp = inbuf, sizeof(inbuf));\n\toutp = outbuf;\n\n\tfor (;;) {\n\t\twhile (!isdig[*inp]) inp++;\n\t\twhile (isdig[*inp]) inp++;\n\n\t\twhile (!isdig[*inp]) inp++;\n\t\tfor (i = 0; isdig[*inp];)\n\t\t\ti = i * 10 + *inp++ - '0';\n\n\t\twhile (!isdig[*inp]) inp++;\n\t\twhile (*inp == '0') inp++;\n\t\tif (!isdig[*inp]) break;\n\t\twhile (isdig[*inp]) inp++;\n\n\t\tif (i & 1) {\n\t\t\t*(unsigned long *)outp = 0x616B654B; outp += 4;\n\t\t\t*outp++ = '\\n';\n\t\t} else {\n\t\t\t*(unsigned long *)outp = 0x65726147; outp += 4;\n\t\t\t*(unsigned long *)outp = 0x00000A64; outp += 2;\n\t\t}\n\t}\n\n\twrite(1, outbuf, outp - outbuf);\n\treturn 0;\n}"
+"import log from \"loglevel\";\nimport prompts from \"prompts\";\n\nimport { clean } from \"./clean\";\nimport { libsize } from \"./libsize\";\nimport { ammendCommit, getLernaVersion, git, replaceTag, run } from \"./utils\";\nimport { variables } from \"./variables\";\n\nexport type ReleaseType =\n | \"major\"\n | \"minor\"\n | \"patch\"\n | \"premajor\"\n | \"preminor\"\n | \"prepatch\"\n | \"prerelease\"\n | \"\";\n\nexport const RELEASE_TYPES: ReadonlyArray = [\n \"major\",\n \"minor\",\n \"patch\",\n \"premajor\",\n \"preminor\",\n \"prepatch\",\n \"prerelease\",\n];\n\nexport function toReleaseType(value: string): ReleaseType {\n if (RELEASE_TYPES.includes(value as ReleaseType)) {\n return value as ReleaseType;\n }\n\n return \"\";\n}\n\nasync function rollback(): Promise {\n log.error(\"Cancelling this release...\");\n const version = await getLernaVersion();\n git(`reset HEAD^`);\n git(`tag -d v${version}`);\n git(\"checkout .\");\n\n return process.exit(1);\n}\n\nasync function verify(): Promise {\n const { complete } = await prompts({\n type: \"confirm\",\n name: \"complete\",\n message: \"Continue the release?\",\n initial: false,\n });\n\n if (!complete) {\n await rollback();\n }\n\n log.info();\n}\n\nexport async function release(\n type: ReleaseType = \"\",\n blog: boolean = !type.startsWith(\"pre\"),\n autoYes: boolean = false\n): Promise {\n const yes = autoYes ? \" --yes\" : \"\";\n\n // first, update the version since I'll be ammending this commit and tag with\n // libsize changes, prettier changelogs, and adding the themes specifically\n // for the tag only"
+"#' @section Uniqueness:\n#'\n#' By default the tree IDs are numbered from 1 to n, n being the number of trees found. The problem\n#' with such incremental numbering is that, while it ensures a unique ID is assigned for each tree in\n#' a given point-cloud, it also guarantees duplication of tree IDs in different tiles or chunks when\n#' processing a `LAScatalog`. This is because each file is processed independently of the others and potentially\n#' in parallel on different computers. Thus, the index always restarts at 1 on each file or chunk. Worse,\n#' in a tree segmentation process, a tree that is located exactly between 2 files will have two different\n#' IDs for its two halves.\n#'\n#' This is why we introduced some uniqueness strategies that are all imperfect and that should be seen\n#' as experimental. Please report any troubleshooting. Using a uniqueness-safe strategy ensures that\n#' trees from different files will not share the same IDs. Moreover, it also means that two halves of a tree\n#' on the edge of a processing chunk will be assigned the same ID.\n#'\n#' \\describe{\n#' \\item{incremental}{Number from 0 to n. This method"
+"/*\n * The Computer Language Benchmarks Game\n * http://benchmarksgame.alioth.debian.org/\n * \n * modified by Mehmet D. AKIN\n * modified by Daryl Griffith\n */\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.concurrent.ArrayBlockingQueue;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class fasta {\n\n static final int LINE_LENGTH = 60;\n static final int LINE_COUNT = 1024;\n static final NucleotideSelector[] WORKERS \n = new NucleotideSelector[\n Runtime.getRuntime().availableProcessors() > 1 \n ? Runtime.getRuntime().availableProcessors() - 1 \n : 1];\n static final AtomicInteger IN = new AtomicInteger();\n static final AtomicInteger OUT = new AtomicInteger();\n static final int BUFFERS_IN_PLAY = 6;\n static final int IM = 139968;\n static final int IA = 3877;\n static final int IC = 29573;\n static final float ONE_OVER_IM = 1f / IM;\n static int last = 42;\n\n public static void main(String[] args) {\n int n = 1000;\n\n if (args.length > 0) {\n n = Integer.parseInt(args[0]);\n }\n for (int i = 0; i < WORKERS.length; i++) {\n WORKERS[i] = new NucleotideSelector();\n WORKERS[i].setDaemon(true);\n WORKERS[i].start();\n }\n try (OutputStream writer = System.out;) {\n final int bufferSize = LINE_COUNT * LINE_LENGTH;\n\n for (int i = 0; i < BUFFERS_IN_PLAY; i++) {\n lineFillALU(\n new AluBuffer(LINE_LENGTH, bufferSize, i * bufferSize));\n }\n speciesFillALU(writer, n * 2, \">ONE Homo sapiens alu\\n\");\n for (int i = 0; i < BUFFERS_IN_PLAY; i++) {"
+"// Copyright 2016-2020, Pulumi Corporation. All rights reserved.\nusing Pulumi;\nusing Pulumi.AzureNextGen.ContainerInstance.Latest;\nusing Pulumi.AzureNextGen.ContainerInstance.Latest.Inputs;\nusing Pulumi.AzureNextGen.Resources.Latest;\n\nclass MyStack : Stack\n{\n public MyStack()\n {\n var config = new Pulumi.Config();\n var location = config.Get(\"location\") ?? \"WestUS\";\n\n var resourceGroup = new ResourceGroup(\"resourceGroup\", new ResourceGroupArgs\n {\n ResourceGroupName = \"aci-rg\",\n Location = location\n });\n \n var imageName = \"mcr.microsoft.com/azuredocs/aci-helloworld\";\n var containerGroup = new ContainerGroup(\"containerGroup\", new ContainerGroupArgs\n {\n ResourceGroupName = resourceGroup.Name,\n Location = resourceGroup.Location,\n ContainerGroupName = \"helloworld\",\n OsType = \"Linux\",\n Containers =\n {\n new ContainerArgs\n {\n Name = \"acilinuxpublicipcontainergroup\",\n Image = imageName,\n Ports = { new ContainerPortArgs { Port = 80} },\n Resources = new ResourceRequirementsArgs\n {\n Requests = new ResourceRequestsArgs\n {\n Cpu = 1.0,\n MemoryInGB = 1.5,\n }\n }\n }\n },\n IpAddress = new IpAddressArgs\n {\n Ports =\n {\n new PortArgs\n {\n Port = 80,\n Protocol = \"Tcp\",\n }\n },\n Type = \"Public\"\n },\n RestartPolicy = \"always\"\n });\n\n this.ContainerIPv4Address = containerGroup.IpAddress.Apply(ip => ip!.Ip);\n }\n\n [Output(\"containerIPv4Address\")]\n public Output ContainerIPv4Address { get; set; }\n}"
+"/*\n* Part of WCM Commander\n* https://github.com/corporateshark/WCMCommander\n* wcm@linderdaum.com\n*/\n\n#pragma once\n\n#include \n\nusing namespace wal;\n\n\n/**\n * Edit line controll with history and autocomplete support.\n */\nclass clNCEditLine : public ComboBox\n{\nprivate:\n\tstd::vector m_Prefix;\n\tconst char* m_FieldName;\n\t\npublic:\n\tclNCEditLine( const char* FieldName, int Id, Win* Parent, const unicode_t* Txt, int Cols, int Rows, crect* Rect = 0 );\n\n\tvirtual ~clNCEditLine() {}\n\n\tvirtual bool EventKey( cevent_key* pEvent ) override;\n\n\tvirtual bool Command( int Id, int SubId, Win* Win, void* Data ) override;\n\n\tvirtual int UiGetClassId() override;\n\n\tvirtual bool OnOpenBox() override;\n\t\n\tvirtual void OnItemChanged( int ItemIndex ) override;\n\n\tvoid AddCurrentTextToHistory();\n\t\nprivate:\n\tvoid InitBox();\n};"
+"\n\n\n MVC Exceptions\n\n \n Introduction\n\n \n The MVC components in Zend Framework utilize a Front Controller,\n which means that all requests to a given site will go through a\n single entry point. As a result, all exceptions bubble up to the\n Front Controller eventually, allowing the developer to handle them\n in a single location.\n \n\n \n However, exception messages and backtrace information often contain\n sensitive system information, such as SQL statements, file\n locations, and more. To help protect your site, by default\n Zend_Controller_Front catches all exceptions and\n registers them with the response object; in turn, by default, the\n response object does not display exception messages.\n \n \n\n \n Handling Exceptions\n\n \n Several mechanisms are built in to the MVC components already to\n allow you to handle exceptions.\n \n\n \n \n \n By default, the error\n handler plugin is registered and active. This plugin\n was designed to handle:\n \n\n \n Errors due to missing controllers or actions\n Errors occurring within action controllers\n \n\n \n It operates as a postDispatch() plugin, and\n checks to see if a dispatcher, action controller, or\n other exception has occurred. If so, it forwards to an"
+"\ufeffusing Microsoft.Xna.Framework;\nusing Microsoft.Xna.Framework.Graphics;\nusing System;\nusing Terraria;\nusing Terraria.GameInput;\nusing Terraria.UI;\n\nnamespace ExampleMod.UI\n{\n\t// This class wraps the vanilla ItemSlot class into a UIElement. The ItemSlot class was made before the UI system was made, so it can't be used normally with UIState. \n\t// By wrapping the vanilla ItemSlot class, we can easily use ItemSlot.\n\t// ItemSlot isn't very modder friendly and operates based on a \"Context\" number that dictates how the slot behaves when left, right, or shift clicked and the background used when drawn. \n\t// If you want more control, you might need to write your own UIElement.\n\t// I've added basic functionality for validating the item attempting to be placed in the slot via the validItem Func. \n\t// See ExamplePersonUI for usage and use the Awesomify chat option of Example Person to see in action.\n\tinternal class VanillaItemSlotWrapper : UIElement\n\t{\n\t\tinternal Item Item;\n\t\tprivate readonly int _context;\n\t\tprivate readonly float _scale;\n\t\tinternal Func ValidItemFunc;\n\n\t\tpublic VanillaItemSlotWrapper(int context = ItemSlot.Context.BankItem, float scale = 1f) {\n\t\t\t_context = context;\n\t\t\t_scale = scale;\n\t\t\tItem = new Item();\n\t\t\tItem.SetDefaults(0);\n\n\t\t\tWidth.Set(Main.inventoryBack9Texture.Width * scale, 0f);\n\t\t\tHeight.Set(Main.inventoryBack9Texture.Height * scale, 0f);\n\t\t}\n\n\t\tprotected override void DrawSelf(SpriteBatch spriteBatch) {\n\t\t\tfloat oldScale ="
+"si'2. si'8 si'8 |\nla'1 |\ndo''2 do''4 do''4 |\nsi'2 si'4 r4 |\nsi'4 mi''4 dod''4 re''4 |\nsi'2 si'4 dod''4 |\nre''2 re''4 r4 |\nsi'2. si'8 si'8 |\ndo''1 |\nla'2 la'4 sol'4 |\nfad'2 fad'4 r4 |\nsi'4 si'4 sol'4 sol'4 |\nsol'2 sol'4 fad'4 |\nsol'2 sol'4 r4 |\nsi'4 si'4 do''2 |\nla'2 la'4 si'4 |\nsold'4 sold'4 do''4 do''4 |\nla'4 si'4 sold'2 |\nla'2 do''4 re''4 |\nmi''4 si'4 si'4 do''4 |\nre''4 re''4 re''4. re''8 |\nsol'4 sol'4 sol'4. la'8 |\nsi'4 la'4 sol'2 |\nfad'2 fad'4 sol'4 |\nla'4 la'4 la'4 si'4 |\ndo''2 si'4 la'4 |\nsol'4 sol'4 sol'4 la'4 |\nsi'4 si'4 si'4. re''8 |\nsol'4 la'4 fad'2 |\nsol'1 |"
+" 'e13a9d',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '9b45e4',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => '222831',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '393e46',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'da2d2d',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '9d0b0b',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => '6f9a8d',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '1f6650',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'd1274b',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '3d0e1e',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => '71a95a',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '007944',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'e3b04b',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '2b2b28',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'f6ad7b',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => 'be7575',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'a34a28',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '211717',\n ],\n [\n self::KEY_NAME_BACKGROUND_COLOR => 'fc7fb2',\n self::KEY_DESCRIPTION_BACKGROUND_COLOR => '45454d',\n ],\n ];\n\n}"
+"--- cvs-1.11.21/src/diff.c.old\t2005-05-27 19:17:03.000000000 +0200\n+++ cvs-1.11.21/src/diff.c\t2005-12-15 15:22:05.000000000 +0100\n@@ -955,14 +955,16 @@\n \t /* The first revision does not exist. If EMPTY_FILES is\n true, treat this as an added file. Otherwise, warn\n about the missing tag. */\n-\t if( use_rev2 == NULL || RCS_isdead( vers->srcfile, use_rev2 ) )\n+\t if( use_rev2 == NULL || RCS_isdead( vers->srcfile, use_rev2 ) ) {\n \t\t/* At least in the case where DIFF_REV1 and DIFF_REV2\n \t\t * are both numeric (and non-existant (NULL), as opposed to\n \t\t * dead?), we should be returning some kind of error (see\n \t\t * basicb-8a0 in testsuite). The symbolic case may be more\n \t\t * complicated.\n \t\t */\n-\t\treturn DIFF_SAME;\n+\t\terror (0, 0, \"no revision in file %s or missing file %s\", finfo->fullname, finfo->fullname);\n+\t\treturn DIFF_ERROR;\n+\t }\n \t if( empty_files )\n \t\treturn DIFF_ADDED;\n \t if( use_rev1 != NULL )"
+"\ufeffusing System.Linq;\nusing Syncfusion.Windows.Forms.Tools;\n\nnamespace MW5.UI.Helpers\n{\n public static class TreeViewAdvHelper\n {\n public static int GetImageIndex(this TreeNodeAdv node)\n {\n return node.LeftImageIndices.Any() ? node.LeftImageIndices[0] : -1;\n }\n\n public static TreeNodeAdv CreateNode(this TreeNodeAdvCollection nodes, string key, string text, int imageIndex)\n {\n var node = new TreeNodeAdv(text)\n {\n LeftImageIndices = new[] { imageIndex },\n TagObject = key,\n };\n \n return node;\n }\n\n public static TreeNodeAdv Add(this TreeNodeAdvCollection nodes, string key, string text, int imageIndex)\n {\n var node = CreateNode(nodes, key, text, imageIndex);\n \n nodes.Add(node);\n\n return node;\n }\n\n public static TreeNodeAdv Find(this TreeNodeAdvCollection nodes, string key)\n {\n foreach (TreeNodeAdv n in nodes)\n {\n if (n.TagObject == null)\n {\n continue;\n }\n \n if ((string)n.TagObject == key)\n {\n return n;\n }\n }\n\n return null;\n }\n }\n}"
+"Thank you for downloading Gregdumb's time-of-day skydome blueprint, version 1.4! Created with Unreal Engine 4.8 but tested up to 4.18.\r\n\r\nYou can use this work any way you want, commercial or not. I won't sue you if you don't credit me, but it would be nice if you did.\r\n\r\n\r\n//////////////////////////////\r\n\r\nInstallation Instructions:\r\n\r\n- Extract this archive;\r\n- Copy the \"Content\" folder into your project folder (it should merge with the content folder already there);\r\n- When it asks if you want to merge folders, say yes;\r\n- Launch UnrealEd, and load the project you copied that folder to;\r\n- Go to 'File > Open Level' and choose \"TimeOfDayTemplate.umap\" (it should be located under \\YourProject\\Content\\Maps);\r\n- Hit simulate, and watch the sun move!\r\n\r\n//////////////////////////////\r\n\r\n\r\nFor more information and details please visit https://gregbrisebois.com.\r\n\r\nContact me here: https://forums.unrealengine.com/member.php?179-gregdumb Please give me criticism and feedback!\r\n\r\n\r\nCreated by Greg Brisebois, with special thanks to Shoiko and lbraud."
+"// IM has the following fastpaths:\n// - constant index (constant)\n// - need negative int check (neg)\n// - needs hole check (hole)\n// So to test everything we have to do:\n// constant | neg | hole\n// test 1: 0 0 0\n// test 2: 1 0 0\n// test 3: 0 1 0\n// test 4: 1 1 0\n// test 5: 0 0 1\n// test 6: 1 0 1\n// test 7: 0 1 1\n// test 8: 1 1 1\n\nfunction test1(index, a) {\n if (index < 0)\n index = -index\n return index in a;\n}\nassertEq(test1(1, [1,2]), true);\n\nfunction test2(a) {\n return 0 in a;\n}\nassertEq(test2([1,2]), true);\n\nfunction test3(index, a) {\n return index in a;\n}\n\nvar arr3 = [];\narr3[\"-1073741828\"] = 17;\nassertEq(test3(-1073741828, arr3), true);\n\nfunction test4(a) {\n return -1073741828 in a;\n}\nassertEq(test4(arr3), true);\n\n\nfunction test5(index, a) {\n if (index < 0)\n index = -index\n return index in a;\n}\nvar arr5 = [];\narr5[0] = 1\narr5[1] = 1\narr5[2] = 1\narr5[4] = 1\nassertEq(test5(1, arr5), true);\nassertEq(test5(3, arr5), false);\n\nfunction test7a(a) {\n return 3 in a;\n}\nfunction test7b(a) {\n return 4 in a;\n}\nassertEq(test7a(arr5),"
+"C> \\ingroup nwdft\nC> @{\nC>\nC> \\file occup_input.F\nC> Read the occupation numbers\nC>\nC> @}\nC>\nC> \\ingroup nwdft\nC> @{\nC>\nC> \\brief Orbital occupations input reader\nC>\nC> Read the orbital occupation numbers either from the input or from\nC> a file. An example of the input block is\nC>\nC> \\code\nC> occup\nC> 5 3\nC> 1.0 1.0\nC> 1.0 1.0\nC> 1.0 1.0\nC> 1.0\nC> 1.0\nC> end\nC> \\endcode\nC>\nC> The first line with two integers specifies how many occupation\nC> numbers there are to read for each spin channel. Next there are\nC> a number of lines specifying the orbital occupations.\nC>\nC> Similary for reading the occupation numbers from a file\nC>\nC> \\code\nC> occup\nC> 5 3\nC> load file.occup\nC> end\nC> \\endcode\nC>\nC> After reading the occupation numbers they are stored on the \nC> runtime database in the fields:\nC>\nC> - `focc:occup_switch` -- there was an \"occup\" input block\nC>\nC> - `focc:occup` -- the number of alpha- and beta-occupation numbers\nC>\nC> - `focc:occup_list` -- the list of occupation numbers\nC>\nC> The occupation number list is stored essentially as read."
+"\ufeffnamespace Rebus.Routing.TransportMessages\n{\n /// \n /// Options on how to handle exceptions when attempting to forward transport messages\n /// \n public enum ErrorBehavior\n {\n /// \n /// Indicates that no error handling should be done. This puts the burden of handling errors into the hands\n /// of the implementor of the transport message forwarding function, and thus it should handle errors by\n /// forwarding the message somewhere else\n /// \n RetryForever,\n\n /// \n /// Indicates that the transport message should be forwarded to the error queue in the event that there is an error.\n /// This is done in a \"fail fast\"-fashion, so there will be no additional delivery attempts.\n /// \n ForwardToErrorQueue\n }\n}"
+"# Writing actions\n\n## What is an action?\n\nAn action is basically what a controller in an MVC architecture is. It is the glue that interacts with various services and other actions to achieve some business-specific task.\n\nUsing actions is optional (unless you are writing a component which you want other people to use), but they are a great place to put re-usable pieces of logic. You can then call your actions from an http server, or from a CLI interface or from an interactive REPL session. If you put all this logic into an http route handler, you would not be able to easily execute it from other places.\n\nSome common traits of an action:\n\n- They usually interact with many other components (like services or other actions)\n- They frequently contain business-specific logic or accomplish a very specific task\n- They do not need to keep any kind of state (they are stateless - it's just input params->returning results)\n\n> **NOTE**: Contrary to services and hooks, the Action class you implement **is** what you will interface with - when you register an Action into the Atlas instance, the class is instantiated and exposed to you via `atlas.actions.*`.\n\n##"
+"% CVX: Matrix structure definitions and utilities.\n% CVX provides a keyword-based method for definiting matrices\n% with one or more types of structure; e.g.\n% variable X(n,n) symmetric toeplitz tridiagonal;\n% CVX automatically computes an efficient basis for the requested\n% structure. The files in this directory implement those computations.\n%\n% None of these files should be called directly---matrix structure is\n% selected in the VARIABLE declaration; see VARIABLE for more details.\n% Below are the keywords that are available, and the structures they\n% represent. Keywords can be freely combined (see the above example),\n% but of course some combinations are degenerate, yielding only the \n% all-zero matrix; e.g.,\n% variable X(n,n) \n%\n% Structures:\n% banded - (U,L)-banded matrices.\n% complex - Complex variables of all sizes.\n% diagonal - Diagonal matrices.\n% hankel - Hankel matrices.\n% hermitian - Complex Hermitian matrices.\n% lower_bidiagonal - Lower bidiagonal matrices.\n% lower_hessenberg - Lower Hessenberg matrices.\n% lower_triangular - Lower triangular matrices.\n% scaled_identity - Scaled identity: t*eye(n).\n% skew_symmetric - Skew-symmetric matrices.\n% sparse - Matrices with a fixed sparsity pattern.\n% symmetric - Symmetric matrices.\n% toeplitz - Toeplitz matrices.\n% tridiagonal - Tridiagional matrices."
+"---\ntitle: System Props\n---\n\nimport {PropsList, COMMON, LAYOUT, BORDER, TYPOGRAPHY, FLEX, POSITION, GRID} from '../components'\n\nPrimer React components utilize what we call \"system props\" to apply a standard set of props to each component. Using [styled-system](https://github.com/jxnblk/styled-system), groups of props are automatically applied to each component. Most components get the `COMMON` set of props which give the component access to color and space props (margin, padding, color and background color). These groups correspond to the `color` and `space` functions from `styled-system` which can be referenced in the styled system [table of style functions](https://github.com/jxnblk/styled-system/blob/master/docs/table.md#core).\n\nTo check which system props each component includes, check the documentation for that component.\n\n### The `as` prop\nAll Primer React components have access to the `as` prop, provided by [styled-components](https://www.styled-components.com/docs/api#as-polymorphic-prop). We use the `as` prop to render a component with the styles of the passed component in `as`, but with the system props of the base component.\n\nFor example, if you wanted to add some flex utilities to the `Text` component, you could do:\n\n```jsx live\nHello!\n```\n\n\n### System Prop Categories\n\n| Category | Included Props | styled-system docs |\n|-----|--------|--------|\n| `COMMON`| | [styled-system core docs](https://github.com/jxnblk/styled-system/blob/master/docs/table.md#core) |\n| `TYPOGRAPHY`| |"
+"---\ntitle: \"Knowing grids means breaking grids\"\nexcerpt: \"Exploring what it means to develop a grid system that helps facilitate strong design with purpose.\"\nlast_modified_at: 2013-04-26\nimage: \n path: &image /assets/images/knowing-grids-feature.jpg\n width: 1280\n height: 640\n feature: *image\ntwitter:\n card: summary_large_image\ncategories: [notes]\ntags: [design, inspiration]\nsupport: false\ntoc: true\n---\n\nOf all the things to grab my attention, the grid system used in a women's health calendar was certainly not high on my list.\n\nIt's a funny story that will probably make less sense after I describe this awful analogy, but here it goes. You ever do something for so long that you start seeing it creep up in random places? Back in 1997, after I got a Nintendo 64 and played *Super Mario* all night instead of working on my 3D design projects. I started getting these crazy ideas that I could triple jump over ponds or reach the top of buildings by doing a back flip like that mustachioed plumber.\n\nSitting in 2D design and typography classes for 4 years of my life had a similar effect. It was like having a magician expose all his secrets to me. Problem is, I can no longer take these tricks at"
+"/* eslint-disable */\nconst data = [\n {\n \"author\": \"Arkadiy Pilguk(apilguk@gmail.com)\",\n \"license\": \"MIT\"\n },\n {\n \"name\": \"Downsample\",\n \"description\": \"Performance always important, but some algorythms is very expencive to be\\n applyed for original picture size. For this case we need try reduce image\\n size and then apply algorythm, tinycv support a few different ways to reduce\\n demention my meaning pizels or use it maximum value wich is known as MaxPooling\\n layer.\",\n \"examples\": [\n {\n \"title\": \"example\",\n \"description\": \"// this line reduces an input image in 3x\\n downsampleOp(inputImage, 3, 0);\"\n },\n {\n \"title\": \"example\",\n \"description\": \"// this line reduces an input image in 3x\\n downsampleOp(inputImage, 3, 0);\"\n }\n ],\n \"params\": [\n {\n \"name\": \"tSrc\",\n \"description\": \"The source image to be downsampled.\",\n \"type\": [\n {\n \"type\": \"Tensor\"\n }\n ],\n \"optional\": false\n },\n {\n \"name\": \"k\",\n \"description\": \"Downsampling coeficient.\",\n \"type\": [\n {\n \"type\": \"number\"\n }\n ],\n \"optional\": false\n },\n {\n \"name\": \"s\",\n \"description\": \"Downsampling support two possible variants of processing\\n pixels to be downsampled 0 - Max, 1 - Mean.\",\n \"type\": [\n {\n \"type\": \"number\"\n }\n ],\n \"optional\": false\n }\n ]\n }\n]\n\nexport default data;"
+"/**\n * Provides classes representing Python classes.\n */\n\nimport python\n\n/**\n * An (artificial) expression corresponding to a class definition.\n * It is recommended to use `ClassDef` instead.\n */\nclass ClassExpr extends ClassExpr_ {\n /** Gets the metaclass expression */\n Expr getMetaClass() {\n if major_version() = 3\n then\n exists(Keyword metacls |\n this.getAKeyword() = metacls and\n metacls.getArg() = \"metaclass\" and\n result = metacls.getValue()\n )\n else\n exists(Assign a |\n a = this.getInnerScope().getAStmt() and\n a.getATarget().(Name).getId() = \"__metaclass__\" and\n result = a.getValue()\n )\n }\n\n /** Gets the nth keyword argument of this class definition. */\n override DictUnpackingOrKeyword getKeyword(int index) {\n result = this.getKeywords().getItem(index)\n }\n\n /** Gets a keyword argument of this class definition. */\n override DictUnpackingOrKeyword getAKeyword() { result = this.getKeywords().getAnItem() }\n\n override Expr getASubExpression() {\n result = this.getABase() or\n result = this.getAKeyword().getValue() or\n result = this.getKwargs() or\n result = this.getStarargs()\n }\n\n /** Gets a call corresponding to a decorator of this class definition. */\n Call getADecoratorCall() {\n result.getArg(0) = this or\n result.getArg(0) = this.getADecoratorCall()\n }\n\n /** Gets a decorator of this function expression */\n Expr getADecorator() { result = this.getADecoratorCall().getFunc() }\n\n override AstNode getAChildNode() {\n result = this.getASubExpression()\n or\n result = this.getInnerScope()\n }\n\n /** Gets a tuple (*) argument of this class definition."
+"//! A map of all publicly exported items in a crate.\n\nuse std::{cmp::Ordering, fmt, hash::BuildHasherDefault, sync::Arc};\n\nuse base_db::CrateId;\nuse fst::{self, Streamer};\nuse indexmap::{map::Entry, IndexMap};\nuse rustc_hash::{FxHashMap, FxHasher};\nuse smallvec::SmallVec;\nuse syntax::SmolStr;\n\nuse crate::{\n db::DefDatabase,\n item_scope::ItemInNs,\n path::{ModPath, PathKind},\n visibility::Visibility,\n AssocItemId, ModuleDefId, ModuleId, TraitId,\n};\n\ntype FxIndexMap = IndexMap>;\n\n/// Item import details stored in the `ImportMap`.\n#[derive(Debug, Clone, Eq, PartialEq)]\npub struct ImportInfo {\n /// A path that can be used to import the item, relative to the crate's root.\n pub path: ModPath,\n /// The module containing this item.\n pub container: ModuleId,\n}\n\n/// A map from publicly exported items to the path needed to import/name them from a downstream\n/// crate.\n///\n/// Reexports of items are taken into account, ie. if something is exported under multiple\n/// names, the one with the shortest import path will be used.\n///\n/// Note that all paths are relative to the containing crate's root, so the crate name still needs\n/// to be prepended to the `ModPath` before the path is valid.\n#[derive(Default)]\npub struct ImportMap {\n map: FxIndexMap,\n\n /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the\n /// values returned by"
+"package methods;\n\nimport org.openqa.selenium.By;\nimport org.openqa.selenium.support.ui.ExpectedConditions;\nimport org.openqa.selenium.support.ui.WebDriverWait;\n\nimport env.BaseTest;\n\npublic class ProgressMethods extends SelectElementByType implements BaseTest {\n /**\n * Method to wait\n *\n * @param time : String : Time to wait\n * @param method : String : wait by sleep or implicit method\n * @throws NumberFormatException\n * @throws InterruptedException\n */\n public void wait(String time) throws NumberFormatException, InterruptedException {\n // sleep method takes parameter in milliseconds\n Thread.sleep(Integer.parseInt(time) * 1000);\n }\n\n /**\n * Method to Explicitly wait for element to be enabled=click\n *\n * @param accessType : String : Locator type (id, name, class, xpath, css)\n * @param accessName : String : Locator value\n * @param duration : String : Time to wait for element to be clickable\n */\n public void waitForElementToClick(String accessType, String accessName, String duration) {\n final By byEle = getelementbytype(accessType, accessName);\n final WebDriverWait wait = new WebDriverWait(BaseTest.driver, Integer.parseInt(duration) * 1000);\n wait.until(ExpectedConditions.elementToBeClickable(byEle));\n }\n\n /**\n * Method to Explicitly wait for element to be displayed\n *\n * @param accessType : String : Locator type (id, name, class, xpath, css)\n * @param accessName : String : Locator value\n * @param duration : String : Time to wait for element to be displayed\n */\n public void waitForElementToDisplay(String accessType, String accessName, String"
+"---\ntitle: Difference between Git and GitHub\nlocaleTitle: Diferencia entre Git y GitHub\n---\n## Diferencia entre Git y GitHub\n\nGit y Github son dos cosas diferentes. [Git](https://git-scm.com/) es el [sistema de control de versiones](https://en.wikipedia.org/wiki/Version_control) , mientras que [GitHub](https://github.com/) es un servicio para alojar repositorios de Git y ayudar a las personas a colaborar en la escritura de software. Sin embargo, a menudo se confunden por su nombre similar, debido al hecho de que GitHub se construye sobre Git, y porque muchos sitios web y art\u00edculos no hacen la diferencia entre ellos lo suficientemente clara.\n\n\n\n### Git\n\nGit es el sistema de control de versiones distribuido. Git es responsable de realizar un seguimiento de los cambios en el contenido, generalmente los archivos de c\u00f3digo fuente.\n\nPara m\u00e1s informaci\u00f3n, hay un [art\u00edculo completo sobre el propio Git](https://guide.freecodecamp.org/git) .\n\n### GitHub\n\nGitHub es una empresa que proporciona hosting de repositorio Git. Eso significa que proporcionan una soluci\u00f3n llave en mano para alojar repositorios Git en sus servidores. Eso puede ser \u00fatil para mantener una copia de seguridad de su repositorio (Git solo rastrea los cambios realizados en sus archivos a lo largo del tiempo, todav\u00eda se debe hacer"
+"#\n# Step 1: Build sydent and install dependencies\n#\nFROM docker.io/python:3.8-alpine as builder\n\n# Install dev packages\nRUN apk add --no-cache \\\n build-base \\\n libressl-dev \\\n libffi-dev\n\n# Add user sydent\nRUN addgroup -S -g 993 sydent \\\n && adduser -D --home /sydent -S -u 993 -G sydent -s /bin/ash sydent \\\n && echo \"sydent:$(dd if=/dev/random bs=32 count=1 | base64)\" | chpasswd\n\n# Copy resources\nCOPY --chown=sydent:sydent [\"res\", \"/sydent/res\"]\nCOPY --chown=sydent:sydent [\"scripts\", \"/sydent/scripts\"]\nCOPY --chown=sydent:sydent [\"sydent\", \"/sydent/sydent\"]\nCOPY --chown=sydent:sydent [\"README.rst\", \"setup.cfg\", \"setup.py\", \"/sydent/\"]\n\n# Install dependencies\nRUN cd /sydent \\\n && su sydent -c 'pip install --user --upgrade pip setuptools sentry-sdk' \\\n && su sydent -c 'pip install --user -e .' \\\n && rm -rf /sydent/.cache \\\n && find /sydent -name '*.pyc' -delete\n\n#\n# Step 2: Reduce image size and layers\n#\n\nFROM docker.io/python:3.8-alpine\n\n# Install packages\nRUN apk add --no-cache \\\n libressl \\\n libffi\n\n# Add user sydent and create /data directory\nRUN addgroup -S -g 993 sydent \\\n && adduser -D --home /sydent -S -u 993 -G sydent -s /bin/ash sydent \\\n && echo \"sydent:$(dd if=/dev/random bs=32 count=1 | base64)\" | chpasswd \\\n && mkdir /data \\\n && chown sydent:sydent /data\n\n# Copy sydent\nCOPY --from=builder"
+"---\ntitle: Component Manager\n---\n\n# Component Manager\n\nThe Component is a base element of the template. It might be something simple and atomic like an image or a text box, but also complex structures, more probably composed by other components, like sections or pages. The concept of the component was made to allow the developer to bind different behaviors to different elements. For example, opening the Asset Manager on double click of the image is a custom behavior binded to that particular type of element.\n\n::: warning\nThis guide is referring to GrapesJS v0.15.8 or higher\n:::\n\n[[toc]]\n\n\n\n\n\n## How Components work?\n\nLet's see in detail how components work by looking at all the steps from adding an HTML string to the editor.\n\n::: tip\nAll the following snippets can be run directly in console from the [main demo](https://grapesjs.com/demo.html)\n:::\n\nThis is how we can add new components to the canvas:\n\n```js\n// Append components directly to the canvas\neditor.addComponents(`
\n \n Hello world!!!\n
`);\n\n// or into some, already defined, component.\n// For instance, appending to a selected component would be:\neditor.getSelected().append(`
...`);\n\n// Actually, editor.addComponents is an alias of...\neditor.getWrapper().append(`
...`);\n```\n\n::: tip\nIf you need"
+"package define\n\nconst (\n\t// HealthCheckHealthy describes a healthy container\n\tHealthCheckHealthy string = \"healthy\"\n\t// HealthCheckUnhealthy describes an unhealthy container\n\tHealthCheckUnhealthy string = \"unhealthy\"\n\t// HealthCheckStarting describes the time between when the container starts\n\t// and the start-period (time allowed for the container to start and application\n\t// to be running) expires.\n\tHealthCheckStarting string = \"starting\"\n)\n\n// HealthCheckStatus represents the current state of a container\ntype HealthCheckStatus int\n\nconst (\n\t// HealthCheckSuccess means the health worked\n\tHealthCheckSuccess HealthCheckStatus = iota\n\t// HealthCheckFailure means the health ran and failed\n\tHealthCheckFailure HealthCheckStatus = iota\n\t// HealthCheckContainerStopped means the health check cannot\n\t// be run because the container is stopped\n\tHealthCheckContainerStopped HealthCheckStatus = iota\n\t// HealthCheckContainerNotFound means the container could\n\t// not be found in local store\n\tHealthCheckContainerNotFound HealthCheckStatus = iota\n\t// HealthCheckNotDefined means the container has no health\n\t// check defined in it\n\tHealthCheckNotDefined HealthCheckStatus = iota\n\t// HealthCheckInternalError means some something failed obtaining or running\n\t// a given health check\n\tHealthCheckInternalError HealthCheckStatus = iota\n\t// HealthCheckDefined means the healthcheck was found on the container\n\tHealthCheckDefined HealthCheckStatus = iota\n)"
+"\n console.debug \"AnalysisRequestAdd::load\"\n\n # load translations\n jarn.i18n.loadCatalog 'bika'\n @_ = window.jarn.i18n.MessageFactory('bika')\n\n # disable browser autocomplete\n $('input[type=text]').prop 'autocomplete', 'off'\n\n # storage for global Bika settings\n @global_settings = {}\n\n # services data snapshot from recalculate_records\n # returns a mapping of arnum -> services data\n @records_snapshot = {}\n\n # brain for already applied templates\n @applied_templates = {}\n\n # Remove the '.blurrable' class to avoid inline field validation\n $(\".blurrable\").removeClass(\"blurrable\")\n\n # bind the event handler to the elements\n @bind_eventhandler()\n\n # N.B.: The new AR Add form handles File fields like this:\n # - File fields can carry more than one field (see init_file_fields)\n # - All uploaded files are extracted and added as attachments to the new created AR\n # - The file field itself (Plone) will stay empty therefore\n @init_file_fields()\n\n # get the global settings on load\n @get_global_settings()\n\n # recalculate records on load (needed for AR copies)\n @recalculate_records()\n\n\n ### METHODS ###\n\n bind_eventhandler: =>\n ###\n * Binds callbacks on elements\n ###\n console.debug \"AnalysisRequestAdd::bind_eventhandler\"\n # Categories header clicked\n $(\".service-listing-header\").on \"click\", @on_service_listing_header_click\n # Category toggle button clicked\n $(\"tr.category\").on \"click\", @on_service_category_click\n # Save"
+"---\ntitle: Recursion\n---\n\nThere are three places recursion is commonly used: bindings, types, and modules.\n\nFeature | Recursive | Non-recursive\n---------|---------------------|--------------\nBindings | `let rec fn = ...` | `let fn = ...`\nTypes | `type t = ...` | `type nonrec t = ...`\nModules | `module rec A ...` | `module A ...`\n\n## Recursive Bindings\n\nBy default the name of a binding is not available for use on the right side of\nthat binding. This applies to all `let` bindings, including function\ndefinitions. This behavior is required for\n[Binding Shadowing](let-binding.md#binding-shadowing) to work:\n\n```reason\nlet x = 10;\n/* If bindings were recursive by default this would form a cycle and break */\nlet x = x + 10;\n```\n\nThe natural way to write a recursive function will have an error:\n\n```reason\nlet infiniteRecursion = () => {\n /* Error: Unbound value infiniteRecursion */\n infiniteRecursion();\n};\n```\n\nOpt-in to a recursive binding using the `rec` keyword:\n\n```reason\nlet rec infiniteRecursion = () => {\n infiniteRecursion();\n};\n```\n\n## Mutual Recursion\n\nMutually recursive functions use the `and` keyword:\n\n```reason\nlet rec function1 = () => {\n function2();\n}\nand function2 = () => {\n function3();\n}\nand"
+" 'Yao ', 'Yu ', 'Chong ', 'Xi ', 'Xi ', 'Jiu ', 'Yu ', 'Yu ', 'Xing ', 'Ju ', 'Jiu ', 'Xin ', 'She ', 'She ', 'Yadoru ', 'Jiu ',\n 0x10 => 'Shi ', 'Tan ', 'Shu ', 'Shi ', 'Tian ', 'Dan ', 'Pu ', 'Pu ', 'Guan ', 'Hua ', 'Tan ', 'Chuan ', 'Shun ', 'Xia ', 'Wu ', 'Zhou ',\n 0x20 => 'Dao ', 'Gang ', 'Shan ', 'Yi ', null, 'Pa ', 'Tai ', 'Fan ', 'Ban ', 'Chuan ', 'Hang ', 'Fang ', 'Ban ', 'Que ', 'Hesaki ', 'Zhong ',\n 0x30 => 'Jian ', 'Cang ', 'Ling ', 'Zhu ', 'Ze ', 'Duo ', 'Bo ', 'Xian ', 'Ge ', 'Chuan ', 'Jia ', 'Lu ', 'Hong ', 'Pang ', 'Xi ', null,\n 0x40 => 'Fu ', 'Zao ', 'Feng ', 'Li ', 'Shao ', 'Yu ', 'Lang ', 'Ting ', null, 'Wei ', 'Bo ', 'Meng ', 'Nian ', 'Ju ', 'Huang ', 'Shou ',\n 0x50 => 'Zong ', 'Bian ', 'Mao ', 'Die ', null, 'Bang ', 'Cha ', 'Yi ', 'Sao ', 'Cang ', 'Cao ', 'Lou ', 'Dai ', 'Sori ',"
+"#!/usr/bin/env bash\n\n#!/bin/bash\n\nnchains=20 # number of chains to create\nnentries=0 # number of entries to add to each chain\n\nfa1=$(factom-cli importaddress Fs3E9gV6DXsYzf7Fqx1fVBQPQXV695eP3k5XbmHEZVRLkMdD9qCK)\n\nec1=$(factom-cli importaddress Es3LB2YW9bpdWmMnNQYb31kyPzqnecsNqmg5W4K7FKp4UP6omRTa)\n\necho \"Buying\" 1000 $fa1 $ec1\nfactom-cli buyec $fa1 $ec1 100\nsleep 5s\n\naddentries() {\n # create a random datafile\n\tdatalen=$(shuf -i 100-9900 -n 1)\n\tdatafile=$(mktemp)\n\tbase64 /dev/urandom | head -c $datalen > $datafile\n\n\techo \"Entry Length \" $datalen \" bytes, file name: \" $datafile\n\n\tlet y=$(shuf -i 30-120 -n 1)\n\techo \"sleep\" $y \" seconds before writing entries\"\n\tsleep $y\n\tfor ((i=0; i\r\n /// Unlike forms that have a disposable life cycle with a clear chain of ownership, or purely managed\r\n /// libraries that have in a built in reference counting system, disposable members in DotSpatial\r\n /// require the ability to behave in some cases like a layer that should be removed from memory\r\n /// as soon as it is removed from the map, and other times like a persistent object. The DisposeLock\r\n /// concept works like an industrial lock-out system. Each additional lock increases the lock count.\r\n /// When all users have released the lock, the IsDisposedLocked property will be false, and the next\r\n /// action like a removal from the parent layers will properly dispose the item. Users may feel free\r\n /// to add the lock in order to handle disposal on the layers themselves. These methods will not\r\n /// actually prevent Dispose from functioning, but understand that calling dispose when there is an\r\n /// IsDisposeLocked is true means that there will likely be a problem.\r\n /// \r\n public interface IDisposeLock"
+".. algorithm::\n\n.. summary::\n\n.. relatedalgorithms::\n\n.. properties::\n\nDescription\n-----------\n\nGiven an initial UB at some goniometer configuration, this algorithm facilitates\nthe 'linking' of the UBs across orientations - in other words, ensuring the\ncontinuity of the indexing of reflections throughout reciprocal space allowing\nfor grouping of reflections and refinement.\n\nOn chopper instruments, when the sample is lowered into the\nblockhouse there is often no possibility to adjust its position. When rotating the\ncrystal via the goniometer, since the crystal is likely not centred exactly, the\npredicted peaks from the initial UB often do not capture the data. As well as \nconsistently indexing the peaks, the algorithm also effectively carries out a U\nmatrix correction that accounts for sample miscentering. Use of this algorithm \nwill result in a seperate UB matrix for each orientation which can then be used \nfor integration. \n\nThe algorithm requires a set of predicted peaks that have been generated from\nthe initial UB via goniometer rotation, a set of observed (found) peaks, and\nthe lattice parameters in order to calculate the B matrix. A search within a\nQ-envelope is carried out in which all peaks within the envelope are screened\nas potential 'matches' to the observed"
+"// Utilies for testing\n\nvar assert = require('assert');\nvar Buffer = require('buffer').Buffer;\nvar EventEmitter = require('events').EventEmitter;\nvar strtok = require('../lib/strtok');\nvar sys = require('sys');\n\n// A mock stream implementation that breaks up provided data into\n// random-sized chunks and emits 'data' events. This is used to simulate\n// data arriving with arbitrary packet boundaries.\nvar SourceStream = function(str, min, max) {\n EventEmitter.call(this);\n\n str = str || '';\n min = min || 1;\n max = max || str.length;\n\n var self = this;\n var buf = new Buffer(str, 'binary');\n\n var emitData = function() {\n var len = Math.min(\n min + Math.floor(Math.random() * (max - min)),\n buf.length\n );\n\n var b = buf.slice(0, len);\n\n if (len < buf.length) {\n buf = buf.slice(len, buf.length);\n process.nextTick(emitData);\n } else {\n process.nextTick(function() {\n self.emit('end')\n });\n }\n\n self.emit('data', b);\n };\n\n process.nextTick(emitData);\n};\nsys.inherits(SourceStream, EventEmitter);\nexports.SourceStream = SourceStream;\n\n// Stream to accept write() calls and track them in its own buffer rather\n// than dumping them to a file descriptor\nvar SinkStream = function(bufSz) {\n var self = this;\n\n bufSz = bufSz || 1024;\n var buf = new Buffer(bufSz);\n var bufOffset = 0;\n\n self.write = function() {\n var bl = (typeof arguments[0] === 'string') ?\n Buffer.byteLength(arguments[0], arguments[1]) :\n arguments[0].length;\n\n if"
+"
\nRadley is based on lettering originally drawn and designed for woodcarved titling work.\nIt was later digitized and extended to be used on the web.\nRadley is a practical face, based on letterforms used by hand carvers who cut letters quickly, efficiently, and with style.\nIt can be used for both titling and text typography.\n
\n
\nThe basic letterforms in Radley grew out of sketching and designing directly into wood with traditional carving chisels.\nThese were scanned and traced into FontForge and cleaned up digitally, then the character set was expanded.\nThere is something unique about carving letters into wood with traditional hand tools, and hopefully Radley carries some of the original spirit of these hand carved letterforms.\n
\n
\nSince the initial launch in 2012, Radley was updated by Vernon Adams adding an Italic and support for more Latin languages.\nHe made many glyph refinements throughout the family based on user feedback.\nIn 2017 the family was updated by Marc Foley to complete the work started by Vernon.\n
"
+"\ufeffusing System;\nusing System.Collections.Generic;\nusing System.ComponentModel.Composition;\nusing System.Globalization;\nusing MadsKristensen.EditorExtensions.Settings;\nusing Microsoft.CSS.Core;\nusing Microsoft.VisualStudio.Utilities;\nusing Microsoft.CSS.Core.Checker;\nusing Microsoft.CSS.Core.Parser;\nusing Microsoft.CSS.Core.TreeItems.Selectors;\n\nnamespace MadsKristensen.EditorExtensions.Css\n{\n [Export(typeof(ICssItemChecker))]\n [Name(\"StarSelectorErrorTagProvider\")]\n [Order(After = \"Default Declaration\")]\n internal class StarSelectorErrorTagProvider : ICssItemChecker\n {\n public ItemCheckResult CheckItem(ParseItem item, ICssCheckerContext context)\n {\n SimpleSelector sel = (SimpleSelector)item;\n\n if (!WESettings.Instance.Css.ValidateStarSelector || !sel.IsValid || context == null)\n return ItemCheckResult.Continue;\n\n if (sel.Text == \"*\")\n {\n //string afterStar = sel.Text.Length > index + 1 ? sel.Text.Substring(index + 1) : null;\n //if (afterStar == null || !afterStar.Trim().StartsWith(\"html\", StringComparison.OrdinalIgnoreCase))\n //{\n string errorMessage = string.Format(CultureInfo.InvariantCulture, Resources.PerformanceDontUseStarSelector);\n\n SimpleErrorTag tag = new SimpleErrorTag(sel, errorMessage);\n\n context.AddError(tag);\n //}\n }\n\n return ItemCheckResult.Continue;\n }\n\n\n public IEnumerable ItemTypes\n {\n get { return new[] { typeof(SimpleSelector) }; }\n }\n }\n}"
+"@charset \"UTF-8\";\n/// Creates a grid column of requested size.\n///\n/// @group features\n///\n/// @name Grid column\n///\n/// @argument {number (unitless)} $columns [null]\n/// Specifies the number of columns an element should span based on the total\n/// columns of the grid.\n///\n/// This can also be defined in a shorthand syntax which also contains the\n/// total column count such as `3 of 5`.\n///\n/// @argument {map} $grid [$neat-grid]\n/// The grid to be used to generate the column.\n/// By default, the global `$neat-grid` will be used.\n///\n/// @example scss\n/// .element {\n/// @include grid-column(3);\n/// }\n///\n/// @example css\n/// .element {\n/// width: calc(25% - 25px);\n/// float: left;\n/// margin-left: 20px;\n/// }\n\n@mixin grid-column($columns: null, $grid: $neat-grid) {\n $columns: _neat-column-default($grid, $columns);\n $_grid-columns: _retrieve-neat-setting($grid, columns);\n $_grid-gutter: _retrieve-neat-setting($grid, gutter);\n\n width: calc(#{_neat-column-width($grid, $columns)});\n float: _neat-float-direction($grid);\n margin-#{_neat-float-direction($grid)}: $_grid-gutter;\n}"
+"# Interop with an existing React Redux application\n\nThis recipe will guide you through the process of integrating Easy Peasy into your existing React Redux application. It is possible to slowly migrate an existing React Redux application to Easy Peasy without doing a full rewrite. \n\nEasy Peasy outputs a standard Redux store, and allows customisation of the store via the [StoreConfig](/docs/api/store-config.html). Therefore it is possible to configure the Easy Peasy redux store to match the needs of your existing application. You will likely be able to move your store into Easy Peasy without the need to make any changes to your components.\n\nThis would grant you the ability to slowly and carefully refactor your existing React Redux reducers into Easy Peasy models when needed, though there is nothing preventing you from keeping the concepts (Easy Peasy models, and React Redux reducers) living side by side indefinitely.\n\n## Refactoring the creation of your store\n\nImagine you had a Redux store being configured similarly to the following.\n\n```javascript\nimport { createStore, combineReducers, applyMiddleware } from 'redux';\nimport productsReducer from './reducers/products';\nimport basketReducer from './reducers/basket';\nimport loggerMiddleware from './middleware/logger';\n\nconst rootReducer = combineReducers({\n products: productsReducer,\n basket: basketReducer\n});\n\nconst store = createStore(rootReducer, applyMiddleware(loggerMiddleware));"
+"# Changelog\n\n## [v0.11.2]\n\nMinor:\n\n* Switch to standard MIT SPDX license\n\n## [v0.11.0]\n\nFeatures:\n\n* Add support for `Expect-CT` header. Allows excluding domains that will not have the `Expect-CT` header applied. By default, the `Expect-CT` header will not be applied to localhost. It is also only applied to HTTPS requests \n* Add support for `worker-src` directive for `Content-Security-Policy` header\n\n## [v0.10.0]\n\nBreaking Changes:\n\n* Drop support for ASP.NET Core 1.x\n* Add support for ASP.NET Core 3.0\n\n## [v0.9.0]\n\nFeatures:\n\n* Add support for Nonce generation for `Content-Security-Policy` headers. See [README.md](https://github.com/andrewlock/NetEscapades.AspNetCore.SecurityHeaders/blob/master/README.md#using-nonces-and-generated-hashes-with-content-security-policy) for details\n* Add [TagHelpers](https://www.nuget.org/packages/NetEscapades.AspNetCore.SecurityHeaders.TagHelpers/) library for adding nonces and generating hashes for Razor elements. \n* Allow using HSTS preload with `Strict-Transport-Security`\n* Allow excluding domains from `Strict-Transport-Security`. Similar to the [Microsoft `HstsMiddleware`](https://github.com/aspnet/BasicMiddleware/blob/master/src/Microsoft.AspNetCore.HttpsPolicy/HstsMiddleware.cs), you can skip applying `Strict-Transport-Security` to specific hosts\n\nBreaking Changes:\n\n* All obsolete classes have been removed.\n* Many classes have changed namespace to better reflect their location in the project, and also to aid discovery. If you're using the recommended builders and extension methods, you should not have any build-time breaking changes, but the package is not runtime-compatible with previous versions\n* The `Strict-Transport-Security` header is no longer applied to `localhost` by default. Generally"
+"//\n// Takes a screenshot of the given URL, uses named arguments passed in like so: phantomjs raster.js arg=value arg2=value2\n//\n// Arguments:\n// - url - URL to screenshot\n// - output - page to output (e.g. /tmp/output.png)\n// - width [optional] - default 1024 - viewport width\n// - height [optional] - viewport height (see note below on using height)\n// - debug [optional] - default false - whether to do some extra debugging\n// - div [optional] - a selector to use to screenshot to a specific element\n// - resourceWait [optional] - default 300 - the time to wait after the last resource has loaded in MS before taking the screenshot\n// - maxRenderWait [optional] - default 10000 - the maximum time to wait before taking the screenshot, regardless of whether resources are waiting to be loaded\n// - cutoffWait [optional] - default null - the maximum time to wait before cutting everything off and failing...this helps if there is a page taking a long time to load\n// - top, left, width, height [optional] - dimensions to use to screenshot a specific area of the screen\n//\n// == Important notice when providing height =="
+"This project is **NOT MAINTAINED**.\n\n------\n\n\n\n\n\n**Hermes is a simple and robust in-app notification system for iOS written in Swift.** It supports posting Notifications with styled or unstyled text, an icon, sound, color, and an action closure. You can easily build your own notification template and add any number of attributes and features to a HermesNotification.\n\nHermes shows all queued up notifications at once, with an easy way to swipe through them (and will animate through them automatically if you don't touch any notifications for 3 seconds)\n\n##Installation\n###Cocoapods Installation\nHermes is available on CocoaPods. Just add the following to your project Podfile:\n\n```\npod 'Hermes', '~> 1.0'\n```\n\n###Non-Cocoapods Installation\nYou can drop Hermes' files directly into your project, or drag the Hermes project into your workspace.\n\n###Usage\nImport in **Swift**\n```swift\nimport Hermes\n```\nor **Objective-C**\n```objective-c\n#import \n```\n\n##Getting Started\n###Components\n- **Hermes** (public)\n\n You will use Hermes.sharedInstance to post Notifications. You can tell Hermes when to *wait()* and collect notifications and when to *go()* and post notifications as soon as Hermes has any.\n \n- **Notification** (public, extendable)\n\n A Notification is a model that has attributes like text, image, sound, and color."
+"\n * @license https://opensource.org/licenses/MIT MIT License\n * @link http://github.com/kevinfiol\n */\n\nclass Fuzz\n{\n private $_source;\n private $_sourceLen;\n private $_maxResults;\n private $_searchMode;\n private $_useLCS;\n\n /**\n * Fuzz Object Constructor\n * Initialize private variables\n *\n * @param array $source An array of associative arrays\n * @param int $maxResults The maximum number of results to retrieve upon a search\n * @param int $searchMode 0 = Levenshtein, 1 = Jaro-Winkler\n * @param boolean $useLCS Factor in Longest Common Substring in search results\n */\n public function __construct($source, $maxResults, $searchMode, $useLCS)\n {\n $this->_source = $source;\n $this->_sourceLen = count($source);\n $this->_maxResults = max($maxResults, 1);\n $this->_useLCS = $useLCS;\n\n if ($searchMode < 0 || $searchMode > 1) {\n throw new \\Exception('Invalid search mode');\n } else {\n $this->_searchMode = $searchMode;\n }\n }\n\n /**\n * Search Method\n * Initiate Search\n *\n * @param string $search Term to search for\n * @param int $minLCS (if using LCS) Specify the minimum longest common substring\n * @param int $maxDistance (if using Levenshtein) Specify the maximum distance allowed\n *\n * @return array $results Array of associative arrays"
+"grammar Jnu;\noptions {\n output = AST; // build trees\n ASTLabelType = JnuAST;\n}\n\ntokens {\n METHOD_DECL; // function definition\n ARG_DECL; // parameter\n BLOCK;\n MEMBERS; // class body\n VAR_DECL;\n FIELD_DECL;\n CALL;\n ELIST; // expression list\n EXPR; \t // root of an expression\n ASSIGN='=';\n EXTENDS;\n}\n\ncompilationUnit\n : ( classDefinition | varDeclaration | methodDeclaration )+ EOF\n ;\n\n// START: class\nclassDefinition\n : 'class' ID superClass? '{' classMember+ '}' ';'\n -> ^('class' ID superClass? ^(MEMBERS classMember+))\n ;\nsuperClass\n\t:\t':' 'public' ID -> ^(EXTENDS ID)\n\t;\n// END: class\n\nclassMember\n\t:\ttype ID ('=' expression)? ';' -> ^(FIELD_DECL type ID expression?)\n\t|\tmethodDeclaration\n\t|\t'public' ':' -> // throw away; just making input valid C++\n\t;\n\t\n// START: method\nmethodDeclaration\n : type ID '(' formalParameters? ')' block\n -> ^(METHOD_DECL type ID formalParameters? block)\n ;\n// END: method\n\nformalParameters\n : type ID (',' type ID)* -> ^(ARG_DECL type ID)+\n ;\n\ntype: 'float'\n | 'int'\n |\t'void'\n |\tID // class type name\n ;\n\n// START: block\nblock\n : '{' statement* '}' -> ^(BLOCK statement*)\n ;\n// END: block\n\n// START: var\nvarDeclaration\n : type ID ('=' expression)? ';' -> ^(VAR_DECL type ID expression?)\n ;\n// END: var\n\nstatement\n : block\n |\tvarDeclaration\n | 'return'"
+"import org.jetbrains.numkt.array\nimport org.jetbrains.numkt.columnStack\nimport org.jetbrains.numkt.core.reshape\nimport org.jetbrains.numkt.hstack\nimport org.jetbrains.numkt.vstack\nimport kotlin.test.Test\nimport kotlin.test.assertEquals\n\nclass TestStackingArrays {\n\n @Test\n fun testStackTwoArrays() {\n val checkVStack = array(arrayOf(0, 1, 2, 3, 4, 5, 6, 7)).reshape(4, 2)\n val checkHStack = array(arrayOf(0, 1, 4, 5, 2, 3, 6, 7)).reshape(2, 4)\n\n val a = array(arrayOf(0, 1, 2, 3)).reshape(2, 2)\n\n val b = array(arrayOf(4, 5, 6, 7)).reshape(2, 2)\n\n println(a)\n println(b)\n\n println(\"vstack:\")\n println(vstack(a, b))\n assertEquals(checkVStack, vstack(a, b))\n\n println(\"hstack:\")\n println(hstack(a, b))\n assertEquals(checkHStack, hstack(a, b))\n }\n\n @Test\n fun testNewAxis() {\n val a = array(arrayOf(0, 1, 2, 3)).reshape(2, 2)\n val b = array(arrayOf(4, 5, 6, 7)).reshape(2, 2)\n\n println(columnStack(a, b))\n\n val q = array(arrayOf(4.0, 2.0))\n val w = array(arrayOf(3.0, 8.0))\n\n println(columnStack(q, w))\n println(hstack(q, w))\n }\n}"
+"package com.alibaba.doris.admin.service.impl;\n\nimport java.net.InetAddress;\nimport java.net.UnknownHostException;\n\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\n\nimport com.alibaba.doris.admin.service.AdminService;\nimport com.alibaba.doris.common.util.IPAddressUtil;\n\n/**\n * @project :Doris\n * @author : len.liu\n * @datetime : 2011-7-4 \u4e0b\u534806:14:43\n * @version :\n * @Modification:\n */\npublic class AdminServiceImp implements AdminService {\n\n\tprivate static final Log logger = LogFactory.getLog(AdminServiceImp.class);\n\n private String masterIP;\n\n public boolean isMasterAdmin() {\n String ip = IPAddressUtil.getIPAddress();\n return isMasterAdmin(ip);\n }\n\n public boolean isMasterAdmin(String ip) {\n \t\n \tif( StringUtils.isBlank(ip) || StringUtils.isBlank(masterIP)) {\n \t\treturn false;\n \t}\n \t\n \tInetAddress address;\n\t\ttry {\n\t\t\taddress = InetAddress.getByName( masterIP );\n\t\t\tString aIP = address.getHostAddress();\n\t return StringUtils.equals(ip, aIP);\n\t\t} catch (UnknownHostException e) {\n\t\t\tlogger.error(\"Invalid master ip/domain: \" + masterIP, e);\n\t\t\tthrow new IllegalArgumentException(\"Invalid master ip/domain: \" + masterIP, e);\n\t\t}\n \n }\n\n public void setMasterIP(String masterIP) {\n this.masterIP = masterIP;\n }\n \n public static void main(String[] args) {\n \tAdminServiceImp adminServiceImp = new AdminServiceImp();\n \t\n \tadminServiceImp.setMasterIP(\"doris-test.alibaba-inc.com\");\n \tlogger.info(\"Local is master ? \" + adminServiceImp.isMasterAdmin() ) ;\n \t\n\t}\n\n}"
+"Exercise nbextension history\n----------------------------\n\nUpdate december 30, 2015:\n(@jfbercher) Updated to jupyter notebook 4.1.x\n\nUpdate december 22, 2015:\n(@jfbercher)\n Added the metadata solution_first to mark the beginning of an exercise. It is now possible to have several consecutive exercises. \n\nOctober 21-27,2015: \n(@jfbercher)\n\n1- the extension now works with the multicell API, that is\n - several cells can be selected either via the rubberband extension \n - or via Shift-J (select next) or Shift-K (select previous) keyboard shortcuts\n(probably Shit-up and down will work in a near future) \nNote: previously, the extension required the selected cells to be marked with a \"selected\" key in metadata. This is no more necessary with the new API.\nThen clicking on the toolbar button turns these cells into a \"solution\" which is hidden by default ** Do not forget to keep the Shift key pressed down while clicking on the menu button (otherwise selected cells will be lost)** \n2- the \"state\" of solutions, hidden or shown, is saved and restored at reload/restart. We use the \"solution\" metadata to store the current state.\n3- A small issue (infinite loop when a solution was defined at the bottom edge of the notebook have been corrected)\n4- Added a"
+"import fridgets.*;\nimport javax.swing.*;\nimport java.util.ArrayList;\nimport nz.sodium.*;\n\npublic class textfield {\n public static void main(String[] args) {\n JFrame frame = new JFrame(\"button\");\n frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);\n frame.setContentPane(Transaction.run(() -> {\n FrTextField firstName = new FrTextField(\"Joe\");\n FrTextField lastName = new FrTextField(\"Bloggs\");\n FrButton ok = new FrButton(new Cell<>(\"OK\"));\n FrButton cancel = new FrButton(new Cell<>(\"Cancel\"));\n ArrayList fridgets = new ArrayList<>();\n fridgets.add(ok);\n fridgets.add(cancel);\n Fridget buttons = new FrFlow(FrFlow.Direction.HORIZONTAL,\n fridgets);\n fridgets = new ArrayList<>();\n fridgets.add(firstName);\n fridgets.add(lastName);\n fridgets.add(buttons);\n Fridget dialog =\n new FrFlow(FrFlow.Direction.VERTICAL, fridgets);\n Listener l =\n ok.sClicked\n .map(u -> firstName.text.sample()+\" \"+\n lastName.text.sample())\n .listen(name -> System.out.println(\"OK: \"+name))\n .append(\n cancel.sClicked.listen(\n u -> System.out.println(\"Cancel\")\n )\n );\n return new FrView(frame, dialog) {\n public void removeNotify() {\n super.removeNotify();\n l.unlisten();\n }\n };\n }));\n frame.setSize(360,120);\n frame.setVisible(true);\n }\n}"
+"\ufeffusing RuriLib;\nusing RuriLib.Interfaces;\nusing RuriLib.ViewModels;\nusing System;\nusing System.Collections.Generic;\nusing System.Collections.ObjectModel;\nusing System.IO;\nusing System.Threading.Tasks;\nusing System.Windows;\nusing System.Windows.Data;\n\nnamespace OpenBullet\n{\n public enum Components\n {\n Main,\n RunnerManager,\n Runner,\n ProxyManager,\n WordlistManager,\n HitsDB,\n ConfigManager,\n Stacker,\n OtherOptions,\n Settings,\n ListGenerator,\n SeleniumTools,\n Database,\n About,\n Unknown\n }\n\n public class LoggerViewModel : ViewModelBase, ILogger\n {\n public ObservableCollection EntriesCollection { get; set; }\n\n public IEnumerable Entries => EntriesCollection;\n\n public bool Enabled\n {\n get\n {\n try\n {\n // The settings might be null\n return OB.OBSettings.General.EnableLogging;\n }\n catch\n {\n return false;\n }\n }\n }\n\n public int BufferSize\n {\n get\n {\n try\n {\n // The settings might be null\n return OB.OBSettings.General.LogBufferSize;\n }\n catch\n {\n return 0;\n }\n }\n }\n\n public LoggerViewModel()\n {\n EntriesCollection = new ObservableCollection();\n\n CollectionView view = (CollectionView)CollectionViewSource.GetDefaultView(EntriesCollection);\n view.Filter = ErrorFilter;\n }\n\n public void Refresh()\n {\n try\n {\n CollectionViewSource.GetDefaultView(EntriesCollection).Refresh();\n }\n catch { }\n }\n\n #region Filters\n private bool onlyErrors = false;\n public bool OnlyErrors { get { return onlyErrors; } set { onlyErrors = value; OnPropertyChanged(); Refresh(); } }\n \n private string searchString = \"\";\n public string SearchString { get { return searchString; } set { searchString = value; OnPropertyChanged(); } }\n\n private bool ErrorFilter(object item)\n {\n // If search box not empty, filter out all the stuff"
+"module T8603 where\n\nimport Control.Monad\nimport Data.Functor\nimport Control.Monad.Trans.Class( lift )\nimport Control.Monad.Trans.State( StateT )\n\nnewtype RV a = RV { getPDF :: [(Rational,a)] } deriving (Show, Eq)\n\ninstance Functor RV where\n fmap f = RV . map (\\(x,y) -> (x, f y)) . getPDF\n\ninstance Applicative RV where\n pure = return\n (<*>) = ap\n\ninstance Monad RV where\n return x = RV [(1,x)]\n rv >>= f = RV $\n do (p,a) <- getPDF rv\n guard (p > 0)\n (q,b) <- getPDF $ f a\n guard (q > 0)\n return (p*q, b)\n\ntype RVState s a = StateT s RV a\n\nuniform :: [a] -> RV a\nuniform x = RV [(1/fromIntegral (length x), y) | y <- x]\n\ntestRVState1 :: RVState s Bool\ntestRVState1\n = do prize <- lift uniform [1,2,3]\n return False\n\n-- lift :: (MonadTrans t, Monad m) => m a -> t m a"
+"---\ndescription: A Metadata Provider is a JavaScript function that acts as an interface for accessing metadata related to Images in Cornerstone.\n---\n\n# Metadata Providers\n\n> A **Metadata Provider** is a JavaScript function that acts as an interface for accessing metadata related to Images in Cornerstone. Users can define their own provider functions in order to return any metadata they wish for each specific image.\n\nMedical images typically come with lots of non-pixel-wise metadata such as for example, the pixel spacing of the image, the patient ID, or the scan acquisition date. With some file types (e.g. DICOM), this information is stored within the file header and can be read and parsed and passed around your application. With others (e.g. JPEG, PNG), this information needs to be provided independently from the actual pixel data. Even for DICOM images, however, it is common for application developers to provide metadata independently from the transmission of pixel data from the server to the client since this can considerably improve performance.\n\nTo handle these scenarios, Cornerstone provides infrastructure for the definition and usage of *Metadata Providers*. Metadata Providers are simply functions which take in an [Image Id](image-ids.md) and specified metadata type, and return"
+"# plex2netflix\n\nThis simple tool checks how much of your media from Plex is available to watch on Netflix, and gives you a nice summary with the percentage of media that is available.\n\nI made this tool because I someday want to make the jump to Netflix, but I want to know beforehand how much of the media I have is available there.\n\nIt works by using the Plex Web API to get a list of all media from given library section. If an item has an IMDb ID (you need to enable the IMDb agent for this in Plex), it uses this to search in the [uNoGS](http://unogs.com/) database which has Netflix availability data. If there is no IMDb ID, the item title and year are used.\n\n[**Powered by uNoGS**](http://unogs.com/).\n\n\n\n## Install\n\nYou need to have [Node.js](https://nodejs.org) (4.0 or higher). Install the tool with the node package manager:\n\n```\nnpm install -g plex2netflix\n```\n\nTo update, just run the command above again.\n\n## Usage\n\nFirst, you need to get your [API token from Plex](https://support.plex.tv/hc/en-us/articles/204059436-Finding-your-account-token-X-Plex-Token).\n\n```\nplex2netflix --host 192.168.0.42 --token=xxx --country=us --section=Movies\n```\n\nBy default it searches the Netflix library of the US. You can specify `--country`"
+"# Selection Sort\n\n#### Problem Statement\n\nGiven an unsorted array of n elements, write a function to sort the array\n\n#### Approach\n\n- select the smallest element from the array\n- put it at the beginning of the array\n- then select the smallest array from the remaining unsorted list\n- append it to the sorted array at the beginning\n- keep doing this for every element of the array\n- repeat the above process n times\n\n#### Time Complexity\n\nO(n^2) Worst case performance\n\nO(n^2) Best-case performance\n\nO(n^2) Average performance\n\n#### Space Complexity\n\nO(1) Worst case\n\n\n#### Example\n\n```\narr[] = {80, 10, 40, 30}\nIndexes: 0 1 2 3 \n\n1. Index = 0 \n\tSelect the minimum number from the array (between index 0-3), ie, 10\n2. Swap 10 and 80 (arr[0])\n3. The array now is {10, 80, 40, 30}\n\n4. Index = 1\n\tSelect the minimum number from the array (between index 1-3), ie, 30\n5. Swap 30 and 80 (arr[1])\n6. The array now is {10, 30, 40, 80}\n\n7. Index = 2\n\tSelect the minimum number from the array (between index 2-3), ie, 40\n8. Swap 40 and 40 (arr[2])\n9. The array now is {10,"
+"\n * $ingest = Omeka_File_Ingest_AbstractIngest::factory('Url', $item);\n * $fileRecords = $ingest->ingest('http://www.example.com');\n * \n * \n * @package Omeka\\File\\Ingest\n */\nabstract class Omeka_File_Ingest_AbstractIngest\n{\n /**\n * @var Item\n */\n protected $_item;\n\n /**\n * Set of arbitrary options to use when ingesting files.\n *\n * @var array\n */\n protected $_options = array();\n\n /**\n * Set of validators implementing Zend_Validate_Interface.\n * \n * @var array\n * @see Omeka_File_Ingest_AbstractIngest::addValidator()\n */\n private $_validators = array();\n\n /**\n * The current validated file MIME type.\n * \n * @see Omeka_Validate_File_MimeType::isValid()\n * @var string\n */"
+"struct intx {\n intx() { normalize(1); }\n intx(string n) { init(n); }\n intx(int n) { stringstream ss; ss << n; init(ss.str()); }\n intx(const intx& other)\n : sign(other.sign), data(other.data) { }\n int sign;\n vector data;\n static const int dcnt = 9;\n static const unsigned int radix = 1000000000U;\n int size() const { return data.size(); }\n void init(string n) {\n intx res; res.data.clear();\n if (n.empty()) n = \"0\";\n if (n[0] == '-') res.sign = -1, n = n.substr(1);\n for (int i = n.size() - 1; i >= 0; i -= intx::dcnt) {\n unsigned int digit = 0;\n for (int j = intx::dcnt - 1; j >= 0; j--) {\n int idx = i - j;\n if (idx < 0) continue;\n digit = digit * 10 + (n[idx] - '0'); }\n res.data.push_back(digit); }\n data = res.data;\n normalize(res.sign); }\n intx& normalize(int nsign) {\n if (data.empty()) data.push_back(0);\n for (int i = data.size() - 1; i > 0 && data[i] == 0; i--)\n data.erase(data.begin() + i);\n sign = data.size() == 1 && data[0] == 0 ? 1 : nsign;\n return *this; }\n friend ostream& operator <<(ostream& outs, const intx& n) {\n if (n.sign < 0) outs << '-';\n bool first = true;\n for (int i"
+"---\n# CRD in version v1beta1. Use this for Kubernetes clusters < 1.16\n\n# CRD connecting a ConfigMap with a set of pods which needs to\n# be restarted when the ConfigMap changes\n# CRD connecting a ConfigMap with a set of pods which needs to\n# be restarted when the ConfigMap changes\napiVersion: apiextensions.k8s.io/v1beta1\nkind: CustomResourceDefinition\nmetadata:\n name: configwatchers.k8spatterns.io\nspec:\n scope: Namespaced\n group: k8spatterns.io\n # Additional columns to print when in kubectl get\n additionalPrinterColumns:\n - name: configmap\n description: Name of ConfigMap to watch\n type: string\n JSONPath: .spec.configMap\n - name: podselector\n description: Selector for Pods to restart\n type: string\n JSONPath: .spec.podSelector\n versions:\n - name: v1\n # Enabled\n served: true\n # The version stored in the backend\n storage: true\n names:\n # Kind of this CRD\n kind: ConfigWatcher\n # How to access them via client and REST api\n singular: configwatcher\n plural: configwatchers\n # How to access the CRDs as well (e.g. with \"kubectl get cw\")\n shortNames: [ cw ]\n # Adds Configwatcher to the \"all\" category (e.g. \"kubectl get all\")\n categories: [ all ]\n validation:\n # Validation schema\n openAPIV3Schema:\n properties:\n spec:\n properties:\n configMap:\n type: string\n description: Name of the ConfigMap to monitor for changes\n podSelector:\n type: object\n description: Label selector used for"
+"// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage org.jetbrains.io.jsonRpc;\n\nimport com.intellij.diagnostic.PluginException;\nimport com.intellij.openapi.components.ComponentManager;\nimport com.intellij.openapi.components.ServiceManager;\nimport com.intellij.openapi.extensions.ExtensionPointName;\nimport com.intellij.openapi.extensions.PluginDescriptor;\nimport com.intellij.serviceContainer.BaseKeyedLazyInstance;\nimport com.intellij.util.xmlb.annotations.Attribute;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\npublic final class JsonRpcDomainBean extends BaseKeyedLazyInstance